From e33a2fbedc08770c09df1c0eb6fe507ecd4753e4 Mon Sep 17 00:00:00 2001
From: DrKLO
Date: Sat, 22 Aug 2020 02:59:49 +0300
Subject: [PATCH] Update to 7.0.1 (2065)

---
 Dockerfile                                         |   4 +-
 TMessagesProj/build.gradle                         |   6 +-
 TMessagesProj/jni/TgCalls.mk                       |   1 +
 .../org_telegram_messenger_voip_Instance.cpp       |  22 ++-
 .../jni/rlottie/src/lottie/lottieparser.cpp        |   6 +-
 .../jni/tgcalls/CodecSelectHelper.cpp              |  23 +--
 TMessagesProj/jni/tgcalls/CodecSelectHelper.h      |   5 +-
 TMessagesProj/jni/tgcalls/Instance.h               |   2 +
 TMessagesProj/jni/tgcalls/JsonConfig.cpp           |  13 ++
 TMessagesProj/jni/tgcalls/JsonConfig.h             |  25 +++
 TMessagesProj/jni/tgcalls/Manager.cpp              |   8 +-
 TMessagesProj/jni/tgcalls/Manager.h                |   2 +
 TMessagesProj/jni/tgcalls/MediaManager.cpp         |  23 ++-
 TMessagesProj/jni/tgcalls/MediaManager.h           |   5 +-
 TMessagesProj/jni/tgcalls/NetworkManager.h         |   2 +-
 .../jni/tgcalls/VideoCaptureInterface.h            |   3 +
 .../jni/tgcalls/VideoCaptureInterfaceImpl.cpp      |  15 +-
 .../jni/tgcalls/VideoCaptureInterfaceImpl.h        |   2 +
 .../jni/tgcalls/platform/PlatformInterface.h       |   6 +-
 .../platform/android/AndroidInterface.cpp          |  56 +++----
 .../platform/android/AndroidInterface.h            |   6 +-
 .../reference/InstanceImplReference.cpp            |   9 +-
 .../org/telegram/messenger/BuildVars.java          |   2 +-
 .../org/telegram/messenger/ImageReceiver.java      |  14 +-
 .../messenger/NotificationsController.java         |   8 +-
 .../voip/CallNotificationSoundProvider.java        |  15 +-
 .../org/telegram/messenger/voip/Instance.java      |  18 ++
 .../messenger/voip/VideoCameraCapturer.java        |  10 ++
 .../messenger/voip/VoIPBaseService.java            |   6 +-
 .../telegram/messenger/voip/VoIPService.java       |   1 +
 .../ui/Components/voip/AcceptDeclineView.java      |  67 +++++++-
 .../ui/Components/voip/VoIPHelper.java             |  38 +++--
 .../ui/Components/voip/VoIPPiPView.java            |  39 +++--
 .../Components/voip/VoIPStatusTextView.java        |  49 +++++-
 .../ui/Components/voip/VoIPToggleButton.java       |   3 +
 .../java/org/telegram/ui/LaunchActivity.java       |   2 +-
 .../org/telegram/ui/PaymentFormActivity.java       |   2 +-
 .../java/org/telegram/ui/ProfileActivity.java      |   7 +-
 .../org/telegram/ui/VoIPFeedbackActivity.java      |   7 +-
 .../java/org/telegram/ui/VoIPFragment.java         | 157 +++++++++++++----
 .../src/main/java/org/webrtc/GlShader.java         |   6 +-
 .../webrtc/HardwareVideoDecoderFactory.java        |  26 ++-
 .../webrtc/HardwareVideoEncoderFactory.java        |  82 ++-------
 .../res/drawable-hdpi/call_arrow_right.png         | Bin 0 -> 294 bytes
 .../res/drawable-mdpi/call_arrow_right.png         | Bin 0 -> 212 bytes
 .../res/drawable-xhdpi/call_arrow_right.png        | Bin 0 -> 105 bytes
 .../res/drawable-xxhdpi/call_arrow_right.png       | Bin 0 -> 516 bytes
 TMessagesProj/src/main/res/values/strings.xml      |   5 +
 48 files changed, 557 insertions(+), 251 deletions(-)
 create mode 100644 TMessagesProj/jni/tgcalls/JsonConfig.cpp
 create mode 100644 TMessagesProj/jni/tgcalls/JsonConfig.h
 create mode 100644 TMessagesProj/src/main/res/drawable-hdpi/call_arrow_right.png
 create mode 100644 TMessagesProj/src/main/res/drawable-mdpi/call_arrow_right.png
 create mode 100644 TMessagesProj/src/main/res/drawable-xhdpi/call_arrow_right.png
 create mode 100644 TMessagesProj/src/main/res/drawable-xxhdpi/call_arrow_right.png

diff --git a/Dockerfile b/Dockerfile
index 48b8b90ff..17b5ea46e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -2,7 +2,7 @@ FROM gradle:6.1.1-jdk8
 
 ENV ANDROID_SDK_URL https://dl.google.com/android/repository/sdk-tools-linux-3859397.zip
 ENV ANDROID_API_LEVEL android-30
-ENV ANDROID_BUILD_TOOLS_VERSION 30.0.1
+ENV ANDROID_BUILD_TOOLS_VERSION 30.0.2
 ENV ANDROID_HOME /usr/local/android-sdk-linux
 ENV ANDROID_NDK_VERSION 21.1.6352462
 ENV ANDROID_VERSION 30
@@ -24,4 +24,4 @@ RUN $ANDROID_HOME/tools/bin/sdkmanager "build-tools;${ANDROID_BUILD_TOOLS_VERSIO
 ENV PATH ${ANDROID_NDK_HOME}:$PATH
 ENV PATH ${ANDROID_NDK_HOME}/prebuilt/linux-x86_64/bin/:$PATH
 
-CMD mkdir -p /home/source/TMessagesProj/build/outputs/apk && cp -R /home/source/. /home/gradle && cd /home/gradle && gradle assembleRelease && cp -R /home/gradle/TMessagesProj/build/outputs/apk/. /home/source/TMessagesProj/build/outputs/apk
+CMD mkdir -p /home/source/TMessagesProj/build/outputs/apk && mkdir -p /home/source/TMessagesProj/build/intermediates/ndkBuild && cp -R /home/source/. /home/gradle && cd /home/gradle && gradle assembleRelease && cp -R /home/gradle/TMessagesProj/build/outputs/apk/. /home/source/TMessagesProj/build/outputs/apk && cp -R /home/gradle/TMessagesProj/build/intermediates/ndkBuild/. /home/source/TMessagesProj/build/intermediates/ndkBuild
diff --git a/TMessagesProj/build.gradle b/TMessagesProj/build.gradle
index afff641c3..987f81847 100644
--- a/TMessagesProj/build.gradle
+++ b/TMessagesProj/build.gradle
@@ -43,7 +43,7 @@ dependencies {
 
 android {
     compileSdkVersion 30
-    buildToolsVersion '30.0.1'
+    buildToolsVersion '30.0.2'
     ndkVersion "21.1.6352462"
 
     defaultConfig.applicationId = "org.telegram.messenger"
@@ -280,7 +280,7 @@ android {
         }
     }
 
-    defaultConfig.versionCode = 2064
+    defaultConfig.versionCode = 2065
 
     applicationVariants.all { variant ->
         variant.outputs.all { output ->
@@ -315,7 +315,7 @@ android {
     defaultConfig {
         minSdkVersion 16
         targetSdkVersion 28
-        versionName "7.0.0"
+        versionName "7.0.1"
 
         vectorDrawables.generatedDensities = ['mdpi', 'hdpi', 'xhdpi', 'xxhdpi']
 
diff --git a/TMessagesProj/jni/TgCalls.mk b/TMessagesProj/jni/TgCalls.mk
index 33133121d..cf7e0653b 100644
--- a/TMessagesProj/jni/TgCalls.mk
+++ b/TMessagesProj/jni/TgCalls.mk
@@ -753,6 +753,7 @@ LOCAL_SRC_FILES := \
 ./tgcalls/ThreadLocalObject.cpp \
 ./tgcalls/VideoCaptureInterface.cpp \
 ./tgcalls/VideoCaptureInterfaceImpl.cpp \
+./tgcalls/JsonConfig.cpp \
 ./tgcalls/reference/InstanceImplReference.cpp \
 ./tgcalls/legacy/InstanceImplLegacy.cpp \
 ./tgcalls/platform/android/AndroidInterface.cpp \
diff --git a/TMessagesProj/jni/libtgvoip/client/android/org_telegram_messenger_voip_Instance.cpp b/TMessagesProj/jni/libtgvoip/client/android/org_telegram_messenger_voip_Instance.cpp
index 18c1b77be..dcf03f1a1 100644
--- a/TMessagesProj/jni/libtgvoip/client/android/org_telegram_messenger_voip_Instance.cpp
+++ b/TMessagesProj/jni/libtgvoip/client/android/org_telegram_messenger_voip_Instance.cpp
@@ -69,14 +69,16 @@ struct InstanceHolder {
     std::unique_ptr<Instance> nativeInstance;
     jobject javaInstance;
     std::shared_ptr<VideoCaptureInterface> _videoCapture;
+    std::shared_ptr<PlatformContext> _platformContext;
 };
 
 jclass TrafficStatsClass;
 jclass FinalStateClass;
+jclass NativeInstanceClass;
 jmethodID FinalStateInitMethod;
 
 jlong getInstanceHolderId(JNIEnv *env, jobject obj) {
-    return env->GetLongField(obj, env->GetFieldID(env->GetObjectClass(obj), "nativePtr", "J"));
+    return env->GetLongField(obj, env->GetFieldID(NativeInstanceClass, "nativePtr", "J"));
 }
 
 InstanceHolder *getInstanceHolder(JNIEnv *env, jobject obj) {
@@ -231,6 +233,7 @@ void initWebRTC(JNIEnv *env) {
     rtc::InitializeSSL();
     webrtcLoaded = true;
 
+    NativeInstanceClass = static_cast<jclass>(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/NativeInstance")));
     TrafficStatsClass = static_cast<jclass>(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$TrafficStats")));
     FinalStateClass =
static_cast(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$FinalState"))); FinalStateInitMethod = env->GetMethodID(FinalStateClass, "", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V"); @@ -252,6 +255,8 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati jobject globalRef = env->NewGlobalRef(instanceObj); std::shared_ptr videoCapture = videoCapturer ? std::shared_ptr(reinterpret_cast(videoCapturer)) : nullptr; + std::shared_ptr platformContext = videoCapture ? videoCapture->getPlatformContext() : std::make_shared(env); + Descriptor descriptor = { .config = Config{ .initializationTimeout = configObject.getDoubleField("initializationTimeout"), @@ -273,23 +278,25 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati .stateUpdated = [globalRef](State state) { jint javaState = asJavaState(state); tgvoip::jni::DoWithJNI([globalRef, javaState](JNIEnv *env) { - env->CallVoidMethod(globalRef, env->GetMethodID(env->GetObjectClass(globalRef), "onStateUpdated", "(I)V"), javaState); + env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onStateUpdated", "(I)V"), javaState); }); }, + .platformContext = platformContext, .signalBarsUpdated = [globalRef](int count) { tgvoip::jni::DoWithJNI([globalRef, count](JNIEnv *env) { - env->CallVoidMethod(globalRef, env->GetMethodID(env->GetObjectClass(globalRef), "onSignalBarsUpdated", "(I)V"), count); + env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onSignalBarsUpdated", "(I)V"), count); }); }, .remoteMediaStateUpdated = [globalRef](AudioState audioState, VideoState videoState) { tgvoip::jni::DoWithJNI([globalRef, audioState, videoState](JNIEnv *env) { - env->CallVoidMethod(globalRef, env->GetMethodID(env->GetObjectClass(globalRef), "onRemoteMediaStateUpdated", "(II)V"), audioState, videoState); + env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onRemoteMediaStateUpdated", "(II)V"), (jint) audioState, (jint )videoState); }); }, .signalingDataEmitted = [globalRef](const std::vector &data) { tgvoip::jni::DoWithJNI([globalRef, data](JNIEnv *env) { jbyteArray arr = copyVectorToJavaByteArray(env, data); - env->CallVoidMethod(globalRef, env->GetMethodID(env->GetObjectClass(globalRef), "onSignalingData", "([B)V"), arr); + env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onSignalingData", "([B)V"), arr); + env->DeleteLocalRef(arr); }); }, }; @@ -336,6 +343,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati holder->nativeInstance = tgcalls::Meta::Create(v, std::move(descriptor)); holder->javaInstance = globalRef; holder->_videoCapture = videoCapture; + holder->_platformContext = platformContext; holder->nativeInstance->setIncomingVideoOutput(webrtc::JavaToNativeVideoSink(env, remoteSink)); holder->nativeInstance->setNetworkType(parseNetworkType(networkType)); return reinterpret_cast(holder); @@ -396,7 +404,7 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_stopNativ JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, instance->javaInstance).getStringField("persistentStateFilePath")); savePersistentState(path.c_str(), finalState.persistentState); - env->CallVoidMethod(instance->javaInstance, env->GetMethodID(env->GetObjectClass(instance->javaInstance), "onStop", "(Lorg/telegram/messenger/voip/Instance$FinalState;)V"), asJavaFinalState(env, 
finalState)); + env->CallVoidMethod(instance->javaInstance, env->GetMethodID(NativeInstanceClass, "onStop", "(Lorg/telegram/messenger/voip/Instance$FinalState;)V"), asJavaFinalState(env, finalState)); env->DeleteGlobalRef(instance->javaInstance); delete instance; }); @@ -446,7 +454,7 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setupOutg if (instance->_videoCapture) { return; } - instance->_videoCapture = tgcalls::VideoCaptureInterface::Create(std::make_shared(env)); + instance->_videoCapture = tgcalls::VideoCaptureInterface::Create(instance->_platformContext); instance->_videoCapture->setOutput(webrtc::JavaToNativeVideoSink(env, localSink)); instance->_videoCapture->setState(VideoState::Active); instance->nativeInstance->setVideoCapture(instance->_videoCapture); diff --git a/TMessagesProj/jni/rlottie/src/lottie/lottieparser.cpp b/TMessagesProj/jni/rlottie/src/lottie/lottieparser.cpp index ba0059048..220547810 100755 --- a/TMessagesProj/jni/rlottie/src/lottie/lottieparser.cpp +++ b/TMessagesProj/jni/rlottie/src/lottie/lottieparser.cpp @@ -773,8 +773,10 @@ std::shared_ptr LottieParserImpl::parseAsset() { return sharedAsset; } std::shared_ptr layer = parseLayer(); - staticFlag = staticFlag && layer->isStatic(); - asset->mLayers.push_back(layer); + if (layer) { + staticFlag = staticFlag && layer->isStatic(); + asset->mLayers.push_back(layer); + } } if (!IsValid()) { parsingError = true; diff --git a/TMessagesProj/jni/tgcalls/CodecSelectHelper.cpp b/TMessagesProj/jni/tgcalls/CodecSelectHelper.cpp index 5c18b6d45..83f3c5104 100644 --- a/TMessagesProj/jni/tgcalls/CodecSelectHelper.cpp +++ b/TMessagesProj/jni/tgcalls/CodecSelectHelper.cpp @@ -23,7 +23,7 @@ bool CompareFormats(const VideoFormat &a, const VideoFormat &b) { } } -int FormatPriority(const VideoFormat &format, const std::vector &preferredCodecs) { +int FormatPriority(const VideoFormat &format, const std::vector &preferredCodecs, std::shared_ptr platformContext) { static const auto kCodecs = { std::string(cricket::kAv1CodecName), std::string(cricket::kVp9CodecName), @@ -31,13 +31,13 @@ int FormatPriority(const VideoFormat &format, const std::vector &pr std::string(cricket::kH264CodecName), std::string(cricket::kVp8CodecName), }; - static const auto kSupported = [] { + static const auto kSupported = [platformContext] { const auto platform = PlatformInterface::SharedInstance(); auto result = std::vector(); result.reserve(kCodecs.size()); for (const auto &codec : kCodecs) { - if (platform->supportsEncoding(codec)) { + if (platform->supportsEncoding(codec, platformContext)) { result.push_back(codec); } } @@ -62,19 +62,19 @@ int FormatPriority(const VideoFormat &format, const std::vector &pr return -1; } -bool ComparePriorities(const VideoFormat &a, const VideoFormat &b, const std::vector &preferredCodecs) { - return FormatPriority(a, preferredCodecs) < FormatPriority(b, preferredCodecs); +bool ComparePriorities(const VideoFormat &a, const VideoFormat &b, const std::vector &preferredCodecs, std::shared_ptr platformContext) { + return FormatPriority(a, preferredCodecs, platformContext) < FormatPriority(b, preferredCodecs, platformContext); } -std::vector FilterAndSortEncoders(std::vector list, const std::vector &preferredCodecs) { +std::vector FilterAndSortEncoders(std::vector list, const std::vector &preferredCodecs, std::shared_ptr platformContext) { const auto listBegin = begin(list); const auto listEnd = end(list); - std::sort(listBegin, listEnd, [&preferredCodecs](const VideoFormat &lhs, const VideoFormat 
&rhs) { - return ComparePriorities(lhs, rhs, preferredCodecs); + std::sort(listBegin, listEnd, [&preferredCodecs, platformContext](const VideoFormat &lhs, const VideoFormat &rhs) { + return ComparePriorities(lhs, rhs, preferredCodecs, platformContext); }); auto eraseFrom = listBegin; auto eraseTill = eraseFrom; - while (eraseTill != listEnd && FormatPriority(*eraseTill, preferredCodecs) == -1) { + while (eraseTill != listEnd && FormatPriority(*eraseTill, preferredCodecs, platformContext) == -1) { ++eraseTill; } if (eraseTill != eraseFrom) { @@ -142,8 +142,9 @@ void AddDefaultFeedbackParams(cricket::VideoCodec *codec) { VideoFormatsMessage ComposeSupportedFormats( std::vector encoders, std::vector decoders, - const std::vector &preferredCodecs) { - encoders = FilterAndSortEncoders(std::move(encoders), preferredCodecs); + const std::vector &preferredCodecs, + std::shared_ptr platformContext) { + encoders = FilterAndSortEncoders(std::move(encoders), preferredCodecs, platformContext); auto result = VideoFormatsMessage(); result.encodersCount = (int)encoders.size(); diff --git a/TMessagesProj/jni/tgcalls/CodecSelectHelper.h b/TMessagesProj/jni/tgcalls/CodecSelectHelper.h index d50ffc329..8c9e741f1 100644 --- a/TMessagesProj/jni/tgcalls/CodecSelectHelper.h +++ b/TMessagesProj/jni/tgcalls/CodecSelectHelper.h @@ -6,6 +6,8 @@ namespace tgcalls { +class PlatformContext; + struct CommonFormats { std::vector list; int myEncoderIndex = -1; @@ -19,7 +21,8 @@ struct CommonCodecs { VideoFormatsMessage ComposeSupportedFormats( std::vector encoders, std::vector decoders, - const std::vector &preferredCodecs); + const std::vector &preferredCodecs, + std::shared_ptr platformContext); CommonFormats ComputeCommonFormats( const VideoFormatsMessage &my, diff --git a/TMessagesProj/jni/tgcalls/Instance.h b/TMessagesProj/jni/tgcalls/Instance.h index 38520d817..2f3add501 100644 --- a/TMessagesProj/jni/tgcalls/Instance.h +++ b/TMessagesProj/jni/tgcalls/Instance.h @@ -19,6 +19,7 @@ class VideoFrame; namespace tgcalls { class VideoCaptureInterface; +class PlatformContext; struct Proxy { std::string host; @@ -203,6 +204,7 @@ struct Descriptor { std::function remoteMediaStateUpdated; std::function remotePrefferedAspectRatioUpdated; std::function &)> signalingDataEmitted; + std::shared_ptr platformContext; }; class Meta { diff --git a/TMessagesProj/jni/tgcalls/JsonConfig.cpp b/TMessagesProj/jni/tgcalls/JsonConfig.cpp new file mode 100644 index 000000000..80737e9ca --- /dev/null +++ b/TMessagesProj/jni/tgcalls/JsonConfig.cpp @@ -0,0 +1,13 @@ +#include "JsonConfig.h" + +namespace tgcalls { + +JsonConfig::JsonConfig(Values values) : _values(values) { + +} + +Value JsonConfig::getValue(std::string key) { + return _values[key]; +} + +} // namespace tgcalls diff --git a/TMessagesProj/jni/tgcalls/JsonConfig.h b/TMessagesProj/jni/tgcalls/JsonConfig.h new file mode 100644 index 000000000..c9bd0f853 --- /dev/null +++ b/TMessagesProj/jni/tgcalls/JsonConfig.h @@ -0,0 +1,25 @@ +#ifndef TGCALLS_JSON_CONFIG_H +#define TGCALLS_JSON_CONFIG_H + +#include +#include +#include "absl/types/variant.h" + +namespace tgcalls { + +typedef absl::variant Value; +typedef std::map Values; + +class JsonConfig { + +public: + JsonConfig(Values values); + Value getValue(std::string key); + +private: + Values _values; +}; + +} // namespace tgcalls + +#endif diff --git a/TMessagesProj/jni/tgcalls/Manager.cpp b/TMessagesProj/jni/tgcalls/Manager.cpp index de3f869da..553c0ac92 100644 --- a/TMessagesProj/jni/tgcalls/Manager.cpp +++ 
b/TMessagesProj/jni/tgcalls/Manager.cpp @@ -49,7 +49,8 @@ _remotePrefferedAspectRatioUpdated(std::move(descriptor.remotePrefferedAspectRat _signalingDataEmitted(std::move(descriptor.signalingDataEmitted)), _signalBarsUpdated(std::move(descriptor.signalBarsUpdated)), _localPreferredVideoAspectRatio(descriptor.config.preferredAspectRatio), -_enableHighBitrateVideo(descriptor.config.enableHighBitrateVideo) { +_enableHighBitrateVideo(descriptor.config.enableHighBitrateVideo), +_platformContext(descriptor.platformContext) { assert(_thread->IsCurrent()); assert(_stateUpdated != nullptr); assert(_signalingDataEmitted != nullptr); @@ -166,7 +167,7 @@ void Manager::start() { }); })); bool isOutgoing = _encryptionKey.isOutgoing; - _mediaManager.reset(new ThreadLocalObject(getMediaThread(), [weak, isOutgoing, thread, sendSignalingMessage, videoCapture = _videoCapture, localPreferredVideoAspectRatio = _localPreferredVideoAspectRatio, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, preferredCodecs = _preferredCodecs]() { + _mediaManager.reset(new ThreadLocalObject(getMediaThread(), [weak, isOutgoing, thread, sendSignalingMessage, videoCapture = _videoCapture, localPreferredVideoAspectRatio = _localPreferredVideoAspectRatio, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, preferredCodecs = _preferredCodecs, platformContext = _platformContext]() { return new MediaManager( getMediaThread(), isOutgoing, @@ -184,7 +185,8 @@ void Manager::start() { signalBarsUpdated, localPreferredVideoAspectRatio, enableHighBitrateVideo, - preferredCodecs); + preferredCodecs, + platformContext); })); _networkManager->perform(RTC_FROM_HERE, [](NetworkManager *networkManager) { networkManager->start(); diff --git a/TMessagesProj/jni/tgcalls/Manager.h b/TMessagesProj/jni/tgcalls/Manager.h index 3368231d2..b5dc66672 100644 --- a/TMessagesProj/jni/tgcalls/Manager.h +++ b/TMessagesProj/jni/tgcalls/Manager.h @@ -57,6 +57,8 @@ private: bool _localNetworkIsLowCost = false; bool _remoteNetworkIsLowCost = false; + std::shared_ptr _platformContext; + }; } // namespace tgcalls diff --git a/TMessagesProj/jni/tgcalls/MediaManager.cpp b/TMessagesProj/jni/tgcalls/MediaManager.cpp index 7cecc060a..21411d713 100644 --- a/TMessagesProj/jni/tgcalls/MediaManager.cpp +++ b/TMessagesProj/jni/tgcalls/MediaManager.cpp @@ -59,7 +59,8 @@ MediaManager::MediaManager( std::function signalBarsUpdated, float localPreferredVideoAspectRatio, bool enableHighBitrateVideo, - std::vector preferredCodecs) : + std::vector preferredCodecs, + std::shared_ptr platformContext) : _thread(thread), _eventLog(std::make_unique()), _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()), @@ -69,7 +70,8 @@ _signalBarsUpdated(std::move(signalBarsUpdated)), _outgoingVideoState(videoCapture ? VideoState::Active : VideoState::Inactive), _videoCapture(std::move(videoCapture)), _localPreferredVideoAspectRatio(localPreferredVideoAspectRatio), -_enableHighBitrateVideo(enableHighBitrateVideo) { +_enableHighBitrateVideo(enableHighBitrateVideo), +_platformContext(platformContext) { _ssrcAudio.incoming = isOutgoing ? ssrcAudioIncoming : ssrcAudioOutgoing; _ssrcAudio.outgoing = (!isOutgoing) ? ssrcAudioIncoming : ssrcAudioOutgoing; _ssrcAudio.fecIncoming = isOutgoing ? 
ssrcAudioFecIncoming : ssrcAudioFecOutgoing; @@ -99,13 +101,14 @@ _enableHighBitrateVideo(enableHighBitrateVideo) { mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory(); mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory(); - mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(); - mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(); + mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext); + mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); _myVideoFormats = ComposeSupportedFormats( mediaDeps.video_encoder_factory->GetSupportedFormats(), mediaDeps.video_decoder_factory->GetSupportedFormats(), - preferredCodecs); + preferredCodecs, + _platformContext); mediaDeps.audio_processing = webrtc::AudioProcessingBuilder().Create(); _mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); @@ -121,6 +124,9 @@ _enableHighBitrateVideo(enableHighBitrateVideo) { audioOptions.noise_suppression = true; audioOptions.audio_jitter_buffer_fast_accelerate = true; + std::vector streamIds; + streamIds.push_back("1"); + _audioChannel.reset(_mediaEngine->voice().CreateMediaChannel(_call.get(), cricket::MediaConfig(), audioOptions, webrtc::CryptoOptions::NoGcm())); _videoChannel.reset(_mediaEngine->video().CreateMediaChannel(_call.get(), cricket::MediaConfig(), cricket::VideoOptions(), webrtc::CryptoOptions::NoGcm(), _videoBitrateAllocatorFactory.get())); @@ -166,7 +172,9 @@ _enableHighBitrateVideo(enableHighBitrateVideo) { audioRecvParameters.rtcp.remote_estimate = true; _audioChannel->SetRecvParameters(audioRecvParameters); - _audioChannel->AddRecvStream(cricket::StreamParams::CreateLegacy(_ssrcAudio.incoming)); + cricket::StreamParams audioRecvStreamParams = cricket::StreamParams::CreateLegacy(_ssrcAudio.incoming); + audioRecvStreamParams.set_stream_ids(streamIds); + _audioChannel->AddRecvStream(audioRecvStreamParams); _audioChannel->SetPlayout(true); _videoChannel->SetInterface(_videoNetworkInterface.get()); @@ -506,6 +514,9 @@ void MediaManager::checkIsReceivingVideoChanged(bool wasReceiving) { videoRecvStreamParams.ssrcs = {_ssrcVideo.incoming}; videoRecvStreamParams.ssrc_groups.push_back(videoRecvSsrcGroup); videoRecvStreamParams.cname = "cname"; + std::vector streamIds; + streamIds.push_back("1"); + videoRecvStreamParams.set_stream_ids(streamIds); _videoChannel->SetRecvParameters(videoRecvParameters); _videoChannel->AddRecvStream(videoRecvStreamParams); diff --git a/TMessagesProj/jni/tgcalls/MediaManager.h b/TMessagesProj/jni/tgcalls/MediaManager.h index 82a035681..44cf7ee16 100644 --- a/TMessagesProj/jni/tgcalls/MediaManager.h +++ b/TMessagesProj/jni/tgcalls/MediaManager.h @@ -43,7 +43,8 @@ public: std::function signalBarsUpdated, float localPreferredVideoAspectRatio, bool enableHighBitrateVideo, - std::vector preferredCodecs); + std::vector preferredCodecs, + std::shared_ptr platformContext); ~MediaManager(); void start(); @@ -140,6 +141,8 @@ private: std::unique_ptr _audioNetworkInterface; std::unique_ptr _videoNetworkInterface; + + std::shared_ptr _platformContext; }; } // namespace tgcalls diff --git a/TMessagesProj/jni/tgcalls/NetworkManager.h b/TMessagesProj/jni/tgcalls/NetworkManager.h index a1eda7d13..54309cfd0 100644 --- a/TMessagesProj/jni/tgcalls/NetworkManager.h +++ b/TMessagesProj/jni/tgcalls/NetworkManager.h @@ -17,7 +17,7 @@ namespace rtc { 
class BasicPacketSocketFactory; class BasicNetworkManager; class PacketTransportInternal; -class NetworkRoute; +struct NetworkRoute; } // namespace rtc namespace cricket { diff --git a/TMessagesProj/jni/tgcalls/VideoCaptureInterface.h b/TMessagesProj/jni/tgcalls/VideoCaptureInterface.h index c10c69cea..39e510a40 100644 --- a/TMessagesProj/jni/tgcalls/VideoCaptureInterface.h +++ b/TMessagesProj/jni/tgcalls/VideoCaptureInterface.h @@ -35,6 +35,9 @@ public: virtual void setState(VideoState state) = 0; virtual void setPreferredAspectRatio(float aspectRatio) = 0; virtual void setOutput(std::shared_ptr> sink) = 0; + virtual std::shared_ptr getPlatformContext() { + return nullptr; + } }; diff --git a/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.cpp b/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.cpp index 96455d735..38b277730 100644 --- a/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.cpp +++ b/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.cpp @@ -16,7 +16,7 @@ VideoCaptureInterfaceObject::VideoCaptureInterfaceObject(std::shared_ptr_stateUpdated) { this->_stateUpdated(state); } - }, platformContext, _videoCapturerResolution); + }, _platformContext, _videoCapturerResolution); } } @@ -61,14 +61,14 @@ void VideoCaptureInterfaceObject::setPreferredAspectRatio(float aspectRatio) { if (aspectRatio > 0.01 && _videoCapturerResolution.first != 0 && _videoCapturerResolution.second != 0) { float originalWidth = (float)_videoCapturerResolution.first; float originalHeight = (float)_videoCapturerResolution.second; - + float width = (originalWidth > aspectRatio * originalHeight) ? int(std::round(aspectRatio * originalHeight)) : originalWidth; float height = (originalWidth > aspectRatio * originalHeight) ? originalHeight : int(std::round(originalHeight / aspectRatio)); - + PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, (int)width, (int)height, 30); } } @@ -86,7 +86,8 @@ void VideoCaptureInterfaceObject::setStateUpdated(std::function platformContext) : -_impl(Manager::getMediaThread(), [platformContext]() { + _platformContext(platformContext), + _impl(Manager::getMediaThread(), [platformContext]() { return new VideoCaptureInterfaceObject(platformContext); }) { } @@ -117,8 +118,12 @@ void VideoCaptureInterfaceImpl::setOutput(std::shared_ptr VideoCaptureInterfaceImpl::getPlatformContext() { + return _platformContext; +} + ThreadLocalObject *VideoCaptureInterfaceImpl::object() { return &_impl; } -} // namespace tgcalls +}// namespace tgcalls diff --git a/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.h b/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.h index 65293f3cc..58beba4e8 100644 --- a/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.h +++ b/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.h @@ -44,11 +44,13 @@ public: void setState(VideoState state) override; void setPreferredAspectRatio(float aspectRatio) override; void setOutput(std::shared_ptr> sink) override; + std::shared_ptr getPlatformContext() override; ThreadLocalObject *object(); private: ThreadLocalObject _impl; + std::shared_ptr _platformContext; }; diff --git a/TMessagesProj/jni/tgcalls/platform/PlatformInterface.h b/TMessagesProj/jni/tgcalls/platform/PlatformInterface.h index d92096919..d6d893cdd 100644 --- a/TMessagesProj/jni/tgcalls/platform/PlatformInterface.h +++ b/TMessagesProj/jni/tgcalls/platform/PlatformInterface.h @@ -23,9 +23,9 @@ public: virtual float getDisplayAspectRatio() { return 0.0f; } - virtual std::unique_ptr makeVideoEncoderFactory() = 0; - virtual std::unique_ptr 
makeVideoDecoderFactory() = 0; - virtual bool supportsEncoding(const std::string &codecName) = 0; + virtual std::unique_ptr makeVideoEncoderFactory(std::shared_ptr platformContext) = 0; + virtual std::unique_ptr makeVideoDecoderFactory(std::shared_ptr platformContext) = 0; + virtual bool supportsEncoding(const std::string &codecName, std::shared_ptr platformContext) = 0; virtual rtc::scoped_refptr makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) = 0; virtual void adaptVideoSource(rtc::scoped_refptr videoSource, int width, int height, int fps) = 0; virtual std::unique_ptr makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext, std::pair &outResolution) = 0; diff --git a/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.cpp b/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.cpp index 0b4e1b510..13fb957ba 100644 --- a/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.cpp +++ b/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.cpp @@ -16,6 +16,7 @@ #include "api/video_codecs/builtin_video_encoder_factory.h" #include "api/video_codecs/builtin_video_decoder_factory.h" #include "api/video_track_source_proxy.h" +#include "AndroidContext.h" namespace tgcalls { @@ -28,29 +29,29 @@ float AndroidInterface::getDisplayAspectRatio() { return 0; } -std::unique_ptr AndroidInterface::makeVideoEncoderFactory() { +std::unique_ptr AndroidInterface::makeVideoEncoderFactory(std::shared_ptr platformContext) { JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); - webrtc::ScopedJavaLocalRef factory_class = - webrtc::GetClass(env, "org/webrtc/DefaultVideoEncoderFactory"); - jmethodID factory_constructor = env->GetMethodID( - factory_class.obj(), "", "(Lorg/webrtc/EglBase$Context;ZZ)V"); - webrtc::ScopedJavaLocalRef factory_object( - env, env->NewObject(factory_class.obj(), factory_constructor, - nullptr /* shared_context */, - false /* enable_intel_vp8_encoder */, - true /* enable_h264_high_profile */)); + + AndroidContext *context = (AndroidContext *) platformContext.get(); + jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "getSharedEGLContext", "()Lorg/webrtc/EglBase$Context;"); + jobject eglContext = env->CallObjectMethod(context->getJavaCapturer(), methodId); + + webrtc::ScopedJavaLocalRef factory_class = webrtc::GetClass(env, "org/webrtc/DefaultVideoEncoderFactory"); + jmethodID factory_constructor = env->GetMethodID(factory_class.obj(), "", "(Lorg/webrtc/EglBase$Context;ZZ)V"); + webrtc::ScopedJavaLocalRef factory_object(env, env->NewObject(factory_class.obj(), factory_constructor, eglContext, false, true)); return webrtc::JavaToNativeVideoEncoderFactory(env, factory_object.obj()); } -std::unique_ptr AndroidInterface::makeVideoDecoderFactory() { +std::unique_ptr AndroidInterface::makeVideoDecoderFactory(std::shared_ptr platformContext) { JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); - webrtc::ScopedJavaLocalRef factory_class = - webrtc::GetClass(env, "org/webrtc/DefaultVideoDecoderFactory"); - jmethodID factory_constructor = env->GetMethodID( - factory_class.obj(), "", "(Lorg/webrtc/EglBase$Context;)V"); - webrtc::ScopedJavaLocalRef factory_object( - env, env->NewObject(factory_class.obj(), factory_constructor, - nullptr /* shared_context */)); + + AndroidContext *context = (AndroidContext *) platformContext.get(); + jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "getSharedEGLContext", "()Lorg/webrtc/EglBase$Context;"); + 
jobject eglContext = env->CallObjectMethod(context->getJavaCapturer(), methodId); + + webrtc::ScopedJavaLocalRef factory_class = webrtc::GetClass(env, "org/webrtc/DefaultVideoDecoderFactory"); + jmethodID factory_constructor = env->GetMethodID(factory_class.obj(), "", "(Lorg/webrtc/EglBase$Context;)V"); + webrtc::ScopedJavaLocalRef factory_object(env, env->NewObject(factory_class.obj(), factory_constructor, eglContext)); return webrtc::JavaToNativeVideoDecoderFactory(env, factory_object.obj()); } @@ -64,18 +65,17 @@ rtc::scoped_refptr AndroidInterface::makeVide return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, _source); } -bool AndroidInterface::supportsEncoding(const std::string &codecName) { +bool AndroidInterface::supportsEncoding(const std::string &codecName, std::shared_ptr platformContext) { if (hardwareVideoEncoderFactory == nullptr) { JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); - webrtc::ScopedJavaLocalRef factory_class = - webrtc::GetClass(env, "org/webrtc/HardwareVideoEncoderFactory"); - jmethodID factory_constructor = env->GetMethodID( - factory_class.obj(), "", "(Lorg/webrtc/EglBase$Context;ZZ)V"); - webrtc::ScopedJavaLocalRef factory_object( - env, env->NewObject(factory_class.obj(), factory_constructor, - nullptr, - false, - true)); + + AndroidContext *context = (AndroidContext *) platformContext.get(); + jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "getSharedEGLContext", "()Lorg/webrtc/EglBase$Context;"); + jobject eglContext = env->CallObjectMethod(context->getJavaCapturer(), methodId); + + webrtc::ScopedJavaLocalRef factory_class = webrtc::GetClass(env, "org/webrtc/HardwareVideoEncoderFactory"); + jmethodID factory_constructor = env->GetMethodID(factory_class.obj(), "", "(Lorg/webrtc/EglBase$Context;ZZ)V"); + webrtc::ScopedJavaLocalRef factory_object(env, env->NewObject(factory_class.obj(), factory_constructor, eglContext, false, true)); hardwareVideoEncoderFactory = webrtc::JavaToNativeVideoEncoderFactory(env, factory_object.obj()); } auto formats = hardwareVideoEncoderFactory->GetSupportedFormats(); diff --git a/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.h b/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.h index 63fc9d7e7..1a9273a91 100644 --- a/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.h +++ b/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.h @@ -11,9 +11,9 @@ class AndroidInterface : public PlatformInterface { public: void configurePlatformAudio() override; float getDisplayAspectRatio() override; - std::unique_ptr makeVideoEncoderFactory() override; - std::unique_ptr makeVideoDecoderFactory() override; - bool supportsEncoding(const std::string &codecName) override; + std::unique_ptr makeVideoEncoderFactory(std::shared_ptr platformContext) override; + std::unique_ptr makeVideoDecoderFactory(std::shared_ptr platformContext) override; + bool supportsEncoding(const std::string &codecName, std::shared_ptr platformContext) override; rtc::scoped_refptr makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) override; void adaptVideoSource(rtc::scoped_refptr videoSource, int width, int height, int fps) override; std::unique_ptr makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext, std::pair &outResolution) override; diff --git a/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.cpp b/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.cpp index 
f594151c2..0ac89980d 100644 --- a/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.cpp +++ b/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.cpp @@ -241,7 +241,8 @@ public: _videoCapture(descriptor.videoCapture), _localPreferredVideoAspectRatio(descriptor.config.preferredAspectRatio), _state(State::Reconnecting), - _videoState(_videoCapture ? VideoState::Active : VideoState::Inactive) { + _videoState(_videoCapture ? VideoState::Active : VideoState::Inactive), + _platformContext(descriptor.platformContext) { assert(getMediaThread()->IsCurrent()); rtc::LogMessage::LogToDebug(rtc::LS_INFO); @@ -304,8 +305,8 @@ public: mediaDeps.task_queue_factory = dependencies.task_queue_factory.get(); mediaDeps.audio_encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory(); mediaDeps.audio_decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory(); - mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(); - mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(); + mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext); + mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext); webrtc::AudioProcessing *apm = webrtc::AudioProcessingBuilder().Create(); webrtc::AudioProcessing::Config audioConfig; @@ -926,6 +927,8 @@ private: bool _didSetRemoteDescription = false; std::vector> _pendingRemoteIceCandidates; + + std::shared_ptr _platformContext; }; InstanceImplReference::InstanceImplReference(Descriptor &&descriptor) : diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java index 0011b59be..c867ef0a2 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java @@ -19,7 +19,7 @@ public class BuildVars { public static boolean USE_CLOUD_STRINGS = true; public static boolean CHECK_UPDATES = true; public static boolean TON_WALLET_STANDALONE = false; - public static int BUILD_VERSION = 2064; + public static int BUILD_VERSION = 2065; public static String BUILD_VERSION_STRING = "7.0.0"; public static int APP_ID = 4; public static String APP_HASH = "014b35b6184100b085b0d0572f9b5103"; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java index 3eb97af4c..2208f2443 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImageReceiver.java @@ -880,7 +880,12 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg roundRect.set(drawRegion); if (isRoundRect) { - canvas.drawRoundRect(roundRect,roundRadius[0], roundRadius[0],roundPaint); + try { + canvas.drawRoundRect(roundRect,roundRadius[0], roundRadius[0],roundPaint); + } catch (Exception e) { + onBitmapException(bitmapDrawable); + FileLog.e(e); + } } else { for (int a = 0; a < roundRadius.length; a++) { radii[a * 2] = roundRadius[a]; @@ -930,7 +935,12 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg roundPaint.setAlpha(alpha); if (isRoundRect) { - canvas.drawRoundRect(roundRect, roundRadius[0], roundRadius[0], roundPaint); + try { + canvas.drawRoundRect(roundRect, roundRadius[0], roundRadius[0], roundPaint); + } catch (Exception e) { + 
onBitmapException(bitmapDrawable); + FileLog.e(e); + } } else { for (int a = 0; a < roundRadius.length; a++) { radii[a * 2] = roundRadius[a]; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java index 768ce7070..6cd40d393 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NotificationsController.java @@ -3187,8 +3187,14 @@ public class NotificationsController extends BaseController { int selfUserId = getUserConfig().getClientUserId(); boolean waitingForPasscode = AndroidUtilities.needShowPasscode() || SharedConfig.isWaitingForPasscodeEnter; + int maxCount; + if (UserConfig.getActivatedAccountsCount() >= 3) { + maxCount = 7; + } else { + maxCount = 10; + } for (int b = 0, size = sortedDialogs.size(); b < size; b++) { - if (holders.size() >= 15) { + if (holders.size() >= maxCount) { break; } long dialog_id = sortedDialogs.get(b); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/CallNotificationSoundProvider.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/CallNotificationSoundProvider.java index 3ce3e8059..fdf00c313 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/CallNotificationSoundProvider.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/CallNotificationSoundProvider.java @@ -11,7 +11,6 @@ import androidx.annotation.Nullable; import org.telegram.messenger.ApplicationLoader; import java.io.FileNotFoundException; -import java.io.IOException; /** * This is a very dirty hack to allow Telegram calls to respect user's DND settings. @@ -66,20 +65,20 @@ public class CallNotificationSoundProvider extends ContentProvider { throw new FileNotFoundException("Unexpected application state"); } - VoIPBaseService srv = VoIPBaseService.getSharedInstance(); - if (srv != null) { - srv.startRingtoneAndVibration(); - } - try { + VoIPBaseService srv = VoIPBaseService.getSharedInstance(); + if (srv != null) { + srv.startRingtoneAndVibration(); + } + ParcelFileDescriptor[] pipe = ParcelFileDescriptor.createPipe(); ParcelFileDescriptor.AutoCloseOutputStream outputStream = new ParcelFileDescriptor.AutoCloseOutputStream(pipe[1]); byte[] silentWav = {82, 73, 70, 70, 41, 0, 0, 0, 87, 65, 86, 69, 102, 109, 116, 32, 16, 0, 0, 0, 1, 0, 1, 0, 68, (byte) 172, 0, 0, 16, (byte) 177, 2, 0, 2, 0, 16, 0, 100, 97, 116, 97, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; outputStream.write(silentWav); outputStream.close(); return pipe[0]; - } catch (IOException x) { - throw new FileNotFoundException(x.getMessage()); + } catch (Exception e) { + throw new FileNotFoundException(e.getMessage()); } } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java index 13b09fc41..b20983530 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/Instance.java @@ -330,6 +330,15 @@ public final class Instance { public final boolean useSystemAec; public final double hangupUiTimeout; + public final boolean enable_vp8_encoder; + public final boolean enable_vp8_decoder; + public final boolean enable_vp9_encoder; + public final boolean enable_vp9_decoder; + public final boolean enable_h265_encoder; + public final boolean enable_h265_decoder; + public final boolean 
enable_h264_encoder; + public final boolean enable_h264_decoder; + private final JSONObject jsonObject; private ServerConfig(JSONObject jsonObject) { @@ -337,6 +346,15 @@ public final class Instance { this.useSystemNs = jsonObject.optBoolean("use_system_ns", true); this.useSystemAec = jsonObject.optBoolean("use_system_aec", true); this.hangupUiTimeout = jsonObject.optDouble("hangup_ui_timeout", 5); + + this.enable_vp8_encoder = jsonObject.optBoolean("enable_vp8_encoder", true); + this.enable_vp8_decoder = jsonObject.optBoolean("enable_vp8_decoder", true); + this.enable_vp9_encoder = jsonObject.optBoolean("enable_vp9_encoder", true); + this.enable_vp9_decoder = jsonObject.optBoolean("enable_vp9_decoder", true); + this.enable_h265_encoder = jsonObject.optBoolean("enable_h265_encoder", true); + this.enable_h265_decoder = jsonObject.optBoolean("enable_h265_decoder", true); + this.enable_h264_encoder = jsonObject.optBoolean("enable_h264_encoder", true); + this.enable_h264_decoder = jsonObject.optBoolean("enable_h264_decoder", true); } public String getString(String key) { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCameraCapturer.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCameraCapturer.java index 303171891..36e7e55cd 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCameraCapturer.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCameraCapturer.java @@ -5,6 +5,8 @@ import android.os.Build; import android.os.Handler; import android.os.HandlerThread; +import com.google.android.datatransport.runtime.logging.Logging; + import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.FileLog; @@ -44,6 +46,7 @@ public class VideoCameraCapturer { if (Build.VERSION.SDK_INT < 18) { return; } + Logging.i("VideoCameraCapturer", "device model = " + Build.MANUFACTURER + Build.MODEL); AndroidUtilities.runOnUIThread(() -> { instance = this; thread = new HandlerThread("CallThread"); @@ -192,5 +195,12 @@ public class VideoCameraCapturer { }); } + private EglBase.Context getSharedEGLContext() { + if (eglBase == null) { + eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN); + } + return eglBase != null ? 
eglBase.getEglBaseContext() : null; + } + private static native CapturerObserver nativeGetJavaVideoCapturerObserver(long ptr); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java index 3c3f76d94..961cee631 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java @@ -160,7 +160,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList protected long callStartTime; protected boolean playingSound; protected boolean isOutgoing; - protected boolean videoCall; + public boolean videoCall; protected long videoCapturer; protected Runnable timeoutRunnable; @@ -1511,7 +1511,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList } // some non-Google devices don't implement the ConnectionService API correctly so, sadly, // we'll have to whitelist only a handful of known-compatible devices for now - return "angler".equals(Build.PRODUCT) // Nexus 6P + return false;/*"angler".equals(Build.PRODUCT) // Nexus 6P || "bullhead".equals(Build.PRODUCT) // Nexus 5X || "sailfish".equals(Build.PRODUCT) // Pixel || "marlin".equals(Build.PRODUCT) // Pixel XL @@ -1519,7 +1519,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList || "taimen".equals(Build.PRODUCT) // Pixel 2 XL || "blueline".equals(Build.PRODUCT) // Pixel 3 || "crosshatch".equals(Build.PRODUCT) // Pixel 3 XL - || MessagesController.getGlobalMainSettings().getBoolean("dbg_force_connection_service", false); + || MessagesController.getGlobalMainSettings().getBoolean("dbg_force_connection_service", false);*/ } public interface StateListener { diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java index 13d2f290e..f544f8e59 100755 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java @@ -941,6 +941,7 @@ public class VoIPService extends VoIPBaseService { PendingIntent.getActivity(VoIPService.this, 0, new Intent(VoIPService.this, VoIPFeedbackActivity.class) .putExtra("call_id", call.id) .putExtra("call_access_hash", call.access_hash) + .putExtra("call_video", call.video) .putExtra("account", currentAccount) .addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_SINGLE_TOP), 0).send(); } catch (Exception x) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/AcceptDeclineView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/AcceptDeclineView.java index dc7ae89f2..7e7ec266a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/AcceptDeclineView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/AcceptDeclineView.java @@ -76,6 +76,12 @@ public class AcceptDeclineView extends View { boolean retryMod; Drawable rippleDrawable; + private boolean screenWasWakeup; + Paint linePaint = new Paint(Paint.ANTI_ALIAS_FLAG); + + Drawable arrowDrawable; + + float arrowProgress; public AcceptDeclineView(@NonNull Context context) { super(context); @@ -107,12 +113,14 @@ public class AcceptDeclineView extends View { cancelDrawable = ContextCompat.getDrawable(context, R.drawable.ic_close_white).mutate(); cancelDrawable.setColorFilter(new PorterDuffColorFilter(Color.BLACK, 
PorterDuff.Mode.MULTIPLY)); - acceptCirclePaint.setColor(0x3f45bc4d); rippleDrawable = Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(52), 0, ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.3f))); rippleDrawable.setCallback(this); + + arrowDrawable = ContextCompat.getDrawable(context, R.drawable.call_arrow_right); } + @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); @@ -122,6 +130,8 @@ public class AcceptDeclineView extends View { callDrawable.setBounds(padding, padding, padding + AndroidUtilities.dp(28), padding + AndroidUtilities.dp(28)); cancelDrawable.setBounds(padding, padding, padding + AndroidUtilities.dp(28), padding + AndroidUtilities.dp(28)); + linePaint.setStrokeWidth(AndroidUtilities.dp(3)); + linePaint.setColor(Color.WHITE); } @Override @@ -200,7 +210,7 @@ public class AcceptDeclineView extends View { animator.start(); leftAnimator = animator; if (listener != null) { - if ((!startDrag && Math.abs(dy) < touchSlop) || leftOffsetX > maxOffset * 0.8f) { + if ((!startDrag && Math.abs(dy) < touchSlop && !screenWasWakeup) || leftOffsetX > maxOffset * 0.8f) { listener.onDicline(); } } @@ -214,7 +224,7 @@ public class AcceptDeclineView extends View { animator.start(); rightAnimator = animator; if (listener != null) { - if ((!startDrag && Math.abs(dy) < touchSlop) || -rigthOffsetX > maxOffset * 0.8f) { + if ((!startDrag && Math.abs(dy) < touchSlop && !screenWasWakeup) || -rigthOffsetX > maxOffset * 0.8f) { listener.onAccept(); } } @@ -263,6 +273,49 @@ public class AcceptDeclineView extends View { invalidate(); } + + float k = 0.6f; + if (screenWasWakeup && !retryMod) { + + arrowProgress += 16 / 1500f; + if (arrowProgress > 1) { + arrowProgress = 0; + } + + int cY = (int) (AndroidUtilities.dp(40) + buttonWidth / 2f); + float startX = AndroidUtilities.dp(46) + buttonWidth + AndroidUtilities.dp(8); + float endX = getMeasuredWidth() / 2f - AndroidUtilities.dp(8); + + float lineLength = AndroidUtilities.dp(10); + + float stepProgress = (1f - k) / 3f; + for (int i = 0; i < 3; i++) { + int x = (int) (startX + (endX - startX - lineLength) / 3 * i); + + float alpha = 0.5f; + float startAlphaFrom = i * stepProgress; + if (arrowProgress > startAlphaFrom && arrowProgress < startAlphaFrom + k) { + float p = (arrowProgress - startAlphaFrom) / k; + if (p > 0.5) p = 1f - p; + alpha = 0.5f + p; + } + canvas.save(); + canvas.clipRect(leftOffsetX + AndroidUtilities.dp(46) + buttonWidth / 2,0,getMeasuredHeight(),getMeasuredWidth() >> 1); + arrowDrawable.setAlpha((int) (255 * alpha)); + arrowDrawable.setBounds(x, cY - arrowDrawable.getIntrinsicHeight() / 2, x + arrowDrawable.getIntrinsicWidth(), cY + arrowDrawable.getIntrinsicHeight() / 2); + arrowDrawable.draw(canvas); + canvas.restore(); + + x = (int) (getMeasuredWidth() - (startX + (endX - startX - lineLength) / 3 * i)); + canvas.save(); + canvas.clipRect(getMeasuredWidth() >> 1, 0, rigthOffsetX + getMeasuredWidth() - AndroidUtilities.dp(46) - buttonWidth / 2, getMeasuredHeight()); + canvas.rotate(180, x - arrowDrawable.getIntrinsicWidth() / 2f, cY); + arrowDrawable.setBounds(x - arrowDrawable.getIntrinsicWidth(), cY - arrowDrawable.getIntrinsicHeight() / 2, x, cY + arrowDrawable.getIntrinsicHeight() / 2); + arrowDrawable.draw(canvas); + canvas.restore(); + } + invalidate(); + } bigRadius += AndroidUtilities.dp(8) * 0.005f; canvas.save(); canvas.translate(0, AndroidUtilities.dp(40)); @@ -330,6 +383,7 @@ public class AcceptDeclineView extends View 
{ public interface Listener { void onAccept(); + void onDicline(); } @@ -337,6 +391,7 @@ public class AcceptDeclineView extends View { this.retryMod = retryMod; if (retryMod) { declineDrawable.setColor(Color.WHITE); + screenWasWakeup = false; } else { declineDrawable.setColor(0xFFe61e44); } @@ -421,6 +476,10 @@ public class AcceptDeclineView extends View { return accessibilityNodeProvider; } + public void setScreenWasWakeup(boolean screenWasWakeup) { + this.screenWasWakeup = screenWasWakeup; + } + private static abstract class AcceptDeclineAccessibilityNodeProvider extends AccessibilityNodeProvider { private final View hostView; @@ -514,7 +573,9 @@ public class AcceptDeclineView extends View { } protected abstract CharSequence getVirtualViewText(int virtualViewId); + protected abstract void getVirtualViewBoundsInScreen(int virtualViewId, Rect outRect); + protected abstract void getVirtualViewBoundsInParent(int virtualViewId, Rect outRect); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java index 6fd72cdae..bc03aaab9 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java @@ -82,7 +82,11 @@ public class VoIPHelper { bldr.setNeutralButton(LocaleController.getString("VoipOfflineOpenSettings", R.string.VoipOfflineOpenSettings), (dialog, which) -> activity.startActivity(settingsIntent)); } } - bldr.show(); + try { + bldr.show(); + } catch (Exception e) { + FileLog.e(e); + } return; } @@ -211,7 +215,7 @@ public class VoIPHelper { if (d[0].equals(call.call_id + "")) { try { long accessHash = Long.parseLong(d[1]); - showRateAlert(context, null, call.call_id, accessHash, UserConfig.selectedAccount, true); + showRateAlert(context, null, call.video, call.call_id, accessHash, UserConfig.selectedAccount, true); } catch (Exception ignore) { } return; @@ -219,7 +223,7 @@ public class VoIPHelper { } } - public static void showRateAlert(final Context context, final Runnable onDismiss, final long callID, final long accessHash, final int account, final boolean userInitiative) { + public static void showRateAlert(final Context context, final Runnable onDismiss, boolean isVideo, final long callID, final long accessHash, final int account, final boolean userInitiative) { final File log = getLogFile(callID); final int[] page = {0}; LinearLayout alertView = new LinearLayout(context); @@ -246,32 +250,41 @@ public class VoIPHelper { check.setChecked(!check.isChecked(), true); }; - final String[] problems = {"echo", "noise", "interruptions", "distorted_speech", "silent_local", "silent_remote", "dropped"}; + final String[] problems = {isVideo ? "distorted_video" : null, isVideo ? 
"pixelated_video" : null, "echo", "noise", "interruptions", "distorted_speech", "silent_local", "silent_remote", "dropped"}; for (int i = 0; i < problems.length; i++) { + if (problems[i] == null) { + continue; + } CheckBoxCell check = new CheckBoxCell(context, 1); check.setClipToPadding(false); check.setTag(problems[i]); String label = null; switch (i) { case 0: - label = LocaleController.getString("RateCallEcho", R.string.RateCallEcho); + label = LocaleController.getString("RateCallVideoDistorted", R.string.RateCallVideoDistorted); break; case 1: - label = LocaleController.getString("RateCallNoise", R.string.RateCallNoise); + label = LocaleController.getString("RateCallVideoPixelated", R.string.RateCallVideoPixelated); break; case 2: - label = LocaleController.getString("RateCallInterruptions", R.string.RateCallInterruptions); + label = LocaleController.getString("RateCallEcho", R.string.RateCallEcho); break; case 3: - label = LocaleController.getString("RateCallDistorted", R.string.RateCallDistorted); + label = LocaleController.getString("RateCallNoise", R.string.RateCallNoise); break; case 4: - label = LocaleController.getString("RateCallSilentLocal", R.string.RateCallSilentLocal); + label = LocaleController.getString("RateCallInterruptions", R.string.RateCallInterruptions); break; case 5: - label = LocaleController.getString("RateCallSilentRemote", R.string.RateCallSilentRemote); + label = LocaleController.getString("RateCallDistorted", R.string.RateCallDistorted); break; case 6: + label = LocaleController.getString("RateCallSilentLocal", R.string.RateCallSilentLocal); + break; + case 7: + label = LocaleController.getString("RateCallSilentRemote", R.string.RateCallSilentRemote); + break; + case 8: label = LocaleController.getString("RateCallDropped", R.string.RateCallDropped); break; } @@ -367,10 +380,11 @@ public class VoIPHelper { problemTags.add("#" + check.getTag()); } - if (req.rating < 5) + if (req.rating < 5) { req.comment = commentBox.getText().toString(); - else + } else { req.comment = ""; + } if (!problemTags.isEmpty() && !includeLogs[0]) { req.comment += " " + TextUtils.join(" ", problemTags); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java index 2afa97796..9eb34e383 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java @@ -34,6 +34,7 @@ import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; +import org.telegram.messenger.NotificationCenter; import org.telegram.messenger.R; import org.telegram.messenger.voip.Instance; import org.telegram.messenger.voip.VideoCameraCapturer; @@ -44,7 +45,7 @@ import org.telegram.ui.Components.LayoutHelper; import org.telegram.ui.LaunchActivity; import org.telegram.ui.VoIPFragment; -public class VoIPPiPView implements VoIPBaseService.StateListener { +public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationCenter.NotificationCenterDelegate { public final static int ANIMATION_ENTER_TYPE_SCALE = 0; public final static int ANIMATION_ENTER_TYPE_TRANSITION = 1; @@ -148,6 +149,7 @@ public class VoIPPiPView implements VoIPBaseService.StateListener { float y = preferences.getFloat("relativeY", 0f); instance.setRelativePosition(x, y); + 
NotificationCenter.getGlobalInstance().addObserver(instance, NotificationCenter.didEndCall); wm.addView(instance.windowView, windowLayoutParams); instance.currentUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), null); @@ -324,6 +326,8 @@ public class VoIPPiPView implements VoIPBaseService.StateListener { VoIPService service = VoIPService.getSharedInstance(); if (service != null) { service.hangUp(); + } else { + finish(); } }); @@ -353,23 +357,24 @@ public class VoIPPiPView implements VoIPBaseService.StateListener { if (service != null) { service.unregisterStateListener(this); } - floatingView.getRelativePosition(point); - float x = Math.min(1f, Math.max(0f, point[0])); - float y = Math.min(1f, Math.max(0f, point[1])); - SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("voippipconfig", Context.MODE_PRIVATE); - preferences.edit() - .putFloat("relativeX", x) - .putFloat("relativeY", y) - .apply(); windowView.setVisibility(View.GONE); if (windowView.getParent() != null) { + floatingView.getRelativePosition(point); + float x = Math.min(1f, Math.max(0f, point[0])); + float y = Math.min(1f, Math.max(0f, point[1])); + SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("voippipconfig", Context.MODE_PRIVATE); + preferences.edit() + .putFloat("relativeX", x) + .putFloat("relativeY", y) + .apply(); + try { windowManager.removeView(windowView); } catch (Throwable e) { FileLog.e(e); } } - instance = null; + NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didEndCall); } @Override @@ -477,6 +482,13 @@ public class VoIPPiPView implements VoIPBaseService.StateListener { } } + @Override + public void didReceivedNotification(int id, int account, Object... 
args) { + if (id == NotificationCenter.didEndCall) { + finish(); + } + } + private class FloatingView extends FrameLayout { @@ -762,12 +774,7 @@ public class VoIPPiPView implements VoIPBaseService.StateListener { return; } expandedInstance.windowView.setAlpha(0); - try { - windowManager.removeView(expandedInstance.windowView); - } catch (Throwable e) { - FileLog.e(e); - } - expandedInstance = null; + expandedInstance.finishInternal(); expandedAnimationInProgress = false; if (expanded) { AndroidUtilities.runOnUIThread(collapseRunnable, 3000); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPStatusTextView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPStatusTextView.java index 88dec3302..2f2cd7c99 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPStatusTextView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPStatusTextView.java @@ -19,18 +19,21 @@ import android.widget.TextView; import androidx.annotation.NonNull; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.LocaleController; +import org.telegram.messenger.R; import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.LayoutHelper; public class VoIPStatusTextView extends FrameLayout { TextView[] textView = new TextView[2]; + TextView reconnectTextView; VoIPTimerView timerView; CharSequence nextTextToSet; boolean animationInProgress; - private TextAlphaSpan[] ellSpans = ellSpans = new TextAlphaSpan[]{new TextAlphaSpan(), new TextAlphaSpan(), new TextAlphaSpan()}; + private TextAlphaSpan[] ellSpans = new TextAlphaSpan[]{new TextAlphaSpan(), new TextAlphaSpan(), new TextAlphaSpan()}; private AnimatorSet ellAnimator; private boolean attachedToWindow; @@ -47,6 +50,23 @@ public class VoIPStatusTextView extends FrameLayout { textView[i].setGravity(Gravity.CENTER_HORIZONTAL); addView(textView[i]); } + + reconnectTextView = new TextView(context); + reconnectTextView.setTextSize(15); + reconnectTextView.setShadowLayer(AndroidUtilities.dp(3), 0, AndroidUtilities.dp(.666666667f), 0x4C000000); + reconnectTextView.setTextColor(Color.WHITE); + reconnectTextView.setGravity(Gravity.CENTER_HORIZONTAL); + addView(reconnectTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 0, 22, 0, 0)); + + SpannableStringBuilder ssb = new SpannableStringBuilder(LocaleController.getString("VoipReconnecting", R.string.VoipReconnecting)); + SpannableString ell = new SpannableString("..."); + ell.setSpan(ellSpans[0], 0, 1, 0); + ell.setSpan(ellSpans[1], 1, 2, 0); + ell.setSpan(ellSpans[2], 2, 3, 0); + ssb.append(ell); + reconnectTextView.setText(ssb); + reconnectTextView.setVisibility(View.GONE); + timerView = new VoIPTimerView(context); addView(timerView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); @@ -97,7 +117,6 @@ public class VoIPStatusTextView extends FrameLayout { animated = false; } - if (!animated) { if (animator != null) { animator.cancel(); @@ -230,6 +249,9 @@ public class VoIPStatusTextView extends FrameLayout { textView[0].invalidate(); textView[1].invalidate(); } + if (reconnectTextView.getVisibility() == View.VISIBLE) { + reconnectTextView.invalidate(); + } }); a.setDuration(duration); a.setStartDelay(startDelay); @@ -237,6 +259,29 @@ public class VoIPStatusTextView extends FrameLayout { return a; } + public void showReconnect(boolean showReconnecting, boolean animated) { + if (!animated) { + 
reconnectTextView.animate().setListener(null).cancel(); + reconnectTextView.setVisibility(showReconnecting ? View.VISIBLE : View.GONE); + } else { + if (showReconnecting) { + if (reconnectTextView.getVisibility() != View.VISIBLE) { + reconnectTextView.setVisibility(View.VISIBLE); + reconnectTextView.setAlpha(0); + } + reconnectTextView.animate().setListener(null).cancel(); + reconnectTextView.animate().alpha(1f).setDuration(150).start(); + } else { + reconnectTextView.animate().alpha(0).setListener(new AnimatorListenerAdapter() { + @Override + public void onAnimationEnd(Animator animation) { + reconnectTextView.setVisibility(View.GONE); + } + }).setDuration(150).start(); + } + } + } + private class TextAlphaSpan extends CharacterStyle { private int alpha; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java index 0eb9c1ce9..82fd55a4e 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPToggleButton.java @@ -245,6 +245,8 @@ public class VoIPToggleButton extends FrameLayout { textView[0].setText(text); crossProgress = drawCross ? 1f : 0; iconChangeColor = false; + replaceProgress = 0f; + invalidate(); } else { if (!iconChangeColor) { icon[1] = ContextCompat.getDrawable(getContext(), iconRes).mutate(); @@ -299,6 +301,7 @@ public class VoIPToggleButton extends FrameLayout { } }); replaceAnimator.setDuration(150).start(); + invalidate(); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java index af235f564..a4be720d0 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java @@ -673,7 +673,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa if (BuildVars.LOGS_ENABLED) { FileLog.d("OS name " + os1 + " " + os2); } - if (os1.contains("flyme") || os2.contains("flyme")) { + if ((os1.contains("flyme") || os2.contains("flyme")) && Build.VERSION.SDK_INT <= 24) { AndroidUtilities.incorrectDisplaySizeFix = true; final View view = getWindow().getDecorView().getRootView(); view.getViewTreeObserver().addOnGlobalLayoutListener(onGlobalLayoutListener = () -> { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PaymentFormActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PaymentFormActivity.java index 3636b4f51..aacccf380 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PaymentFormActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PaymentFormActivity.java @@ -364,7 +364,7 @@ public class PaymentFormActivity extends BaseFragment implements NotificationCen currentItemName = message.messageOwner.media.title; validateRequest = request; saveShippingInfo = true; - if (saveCard) { + if (saveCard || currentStep == 4) { saveCardInfo = saveCard; } else { saveCardInfo = paymentForm.saved_credentials != null; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java index f27e84e72..daffa2c48 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ProfileActivity.java @@ -3415,6 +3415,10 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. 
color2 = Theme.getColor(Theme.key_windowBackgroundWhite); verifiedCheckDrawable.setColorFilter(AndroidUtilities.getOffsetColor(color1, color2, value, 1.0f), PorterDuff.Mode.MULTIPLY); } + + if (avatarsViewPagerIndicatorView.getSecondaryMenuItem() != null && videoCallItemVisible) { + needLayoutText(Math.min(1f, extraHeight / AndroidUtilities.dp(88f))); + } } @Override @@ -3932,7 +3936,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter. } int viewWidth = AndroidUtilities.isTablet() ? AndroidUtilities.dp(490) : AndroidUtilities.displaySize.x; - int buttonsWidth = AndroidUtilities.dp(118 + 8 + (40 + (callItemVisible || editItemVisible || searchItem != null ? 48 : 0) + (videoCallItemVisible ? 48 : 0))); + ActionBarMenuItem item = avatarsViewPagerIndicatorView.getSecondaryMenuItem(); + int buttonsWidth = AndroidUtilities.dp(118 + 8 + (40 + (item != null ? 48 * (1.0f - mediaHeaderAnimationProgress) : 0) + (videoCallItemVisible ? 48 * (1.0f - mediaHeaderAnimationProgress) : 0))); int minWidth = viewWidth - buttonsWidth; int width = (int) (viewWidth - buttonsWidth * Math.max(0.0f, 1.0f - (diff != 1.0f ? diff * 0.15f / (1.0f - diff) : 1.0f)) - nameTextView[1].getTranslationX()); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/VoIPFeedbackActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/VoIPFeedbackActivity.java index 7b5edf0b7..5d1a2f89b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/VoIPFeedbackActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/VoIPFeedbackActivity.java @@ -16,12 +16,7 @@ public class VoIPFeedbackActivity extends Activity { setContentView(new View(this)); - VoIPHelper.showRateAlert(this, new Runnable(){ - @Override - public void run(){ - finish(); - } - }, getIntent().getLongExtra("call_id", 0), getIntent().getLongExtra("call_access_hash", 0), getIntent().getIntExtra("account", 0), false); + VoIPHelper.showRateAlert(this, this::finish, getIntent().getBooleanExtra("call_video", false), getIntent().getLongExtra("call_id", 0), getIntent().getLongExtra("call_access_hash", 0), getIntent().getIntExtra("account", 0), false); } @Override diff --git a/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java index 5bffdd119..eb18e8bf1 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java @@ -202,6 +202,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification } }; private boolean lockOnScreen; + private boolean screenWasWakeup; + + private boolean isVideoCall; public static void show(Activity activity) { show(activity, false); @@ -252,6 +255,15 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification } }; instance.deviceIsLocked = ((KeyguardManager) activity.getSystemService(Context.KEYGUARD_SERVICE)).inKeyguardRestrictedInputMode(); + + PowerManager pm = (PowerManager) activity.getSystemService(Context.POWER_SERVICE); + boolean screenOn; + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH) { + screenOn = pm.isInteractive(); + } else { + screenOn = pm.isScreenOn(); + } + instance.screenWasWakeup = !screenOn; windowView.setLockOnScreen(instance.deviceIsLocked); fragment.windowView = windowView; @@ -406,6 +418,10 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification public void didReceivedNotification(int id, int account, Object... 
args) { if (id == NotificationCenter.voipServiceCreated) { if (currentState == VoIPService.STATE_BUSY && VoIPService.getSharedInstance() != null) { + currentUserTextureView.renderer.release(); + callingUserTextureView.renderer.release(); + callingUserMiniTextureRenderer.release(); + initRenderers(); VoIPService.getSharedInstance().registerStateListener(this); } } @@ -430,6 +446,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification @Override public void onMediaStateUpdated(int audioState, int videoState) { previousState = currentState; + if (videoState == Instance.VIDEO_STATE_ACTIVE && !isVideoCall) { + isVideoCall = true; + } updateViewState(); } @@ -442,6 +461,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification @Override public void onVideoAvailableChange(boolean isAvailable) { previousState = currentState; + if (isAvailable && !isVideoCall) { + isVideoCall = true; + } updateViewState(); } @@ -559,7 +581,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification lastContentTapTime = System.currentTimeMillis(); callingUserMiniFloatingLayout.setRelativePosition(currentUserCameraFloatingLayout); cameraForceExpanded = true; - currentState = previousState; + previousState = currentState; updateViewState(); } }); @@ -585,7 +607,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification lastContentTapTime = System.currentTimeMillis(); currentUserCameraFloatingLayout.setRelativePosition(callingUserMiniFloatingLayout); cameraForceExpanded = false; - currentState = previousState; + previousState = currentState; updateViewState(); } }); @@ -700,7 +722,6 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification bottomButtons[i] = new VoIPToggleButton(context); buttonsLayout.addView(bottomButtons[i]); } - acceptDeclineView = new AcceptDeclineView(context); acceptDeclineView.setListener(new AcceptDeclineView.Listener() { @Override @@ -710,6 +731,8 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification intent.putExtra("user_id", callingUser.id); intent.putExtra("is_outgoing", true); intent.putExtra("start_incall_activity", false); + intent.putExtra("video_call", isVideoCall); + intent.putExtra("can_video_call", isVideoCall); intent.putExtra("account", UserConfig.selectedAccount); try { activity.startService(intent); @@ -741,6 +764,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification } } }); + acceptDeclineView.setScreenWasWakeup(screenWasWakeup); frameLayout.addView(buttonsLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM)); frameLayout.addView(acceptDeclineView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 186, Gravity.BOTTOM)); @@ -789,42 +813,47 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification VoIPService service = VoIPService.getSharedInstance(); if (service != null) { - - if (VideoCameraCapturer.eglBase == null) { - VideoCameraCapturer.eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN); + if (!isVideoCall) { + isVideoCall = service.call != null && service.call.video; } - - currentUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), new RendererCommon.RendererEvents() { - @Override - public void onFirstFrameRendered() { - AndroidUtilities.runOnUIThread(() -> updateViewState()); - } - - @Override - public void onFrameResolutionChanged(int videoWidth, int videoHeight, int 
rotation) { - - } - - }); - callingUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), new RendererCommon.RendererEvents() { - @Override - public void onFirstFrameRendered() { - AndroidUtilities.runOnUIThread(() -> updateViewState()); - } - - @Override - public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) { - - } - - }, EglBase.CONFIG_PLAIN, new GlRectDrawer()); - - callingUserMiniTextureRenderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), null); + initRenderers(); } return frameLayout; } + private void initRenderers() { + if (VideoCameraCapturer.eglBase == null) { + VideoCameraCapturer.eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN); + } + currentUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), new RendererCommon.RendererEvents() { + @Override + public void onFirstFrameRendered() { + AndroidUtilities.runOnUIThread(() -> updateViewState()); + } + + @Override + public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) { + + } + + }); + callingUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), new RendererCommon.RendererEvents() { + @Override + public void onFirstFrameRendered() { + AndroidUtilities.runOnUIThread(() -> updateViewState()); + } + + @Override + public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) { + + } + + }, EglBase.CONFIG_PLAIN, new GlRectDrawer()); + + callingUserMiniTextureRenderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), null); + } + public void switchToPip() { if (isFinished || !AndroidUtilities.checkInlinePermissions(activity) || instance == null) { return; @@ -907,6 +936,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification notificationsLayout.setAlpha(0f); callingUserPhotoView.setAlpha(0f); + currentUserCameraFloatingLayout.switchingToPip = true; AndroidUtilities.runOnUIThread(() -> { VoIPPiPView.switchingToPip = false; VoIPPiPView.finish(); @@ -921,7 +951,6 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification notificationsLayout.animate().alpha(1f).setDuration(350).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); callingUserPhotoView.animate().alpha(1f).setDuration(350).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); - currentUserCameraFloatingLayout.switchingToPip = true; animator.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { @@ -936,8 +965,8 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification animator.setDuration(350); animator.setInterpolator(CubicBezierInterpolator.DEFAULT); animator.start(); - }, 64); - }, 64); + }, 32); + }, 32); } @@ -1113,6 +1142,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification boolean animated = previousState != -1; boolean showAcceptDeclineView = false; boolean showTimer = false; + boolean showReconnecting = false; boolean showCallingAvatarMini = false; int statusLayoutOffset = 0; @@ -1120,6 +1150,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification if (service != null) { callingUserIsVideo = service.getCurrentVideoState() == Instance.VIDEO_STATE_ACTIVE; currentUserIsVideo = service.getVideoState() == Instance.VIDEO_STATE_ACTIVE || service.getVideoState() == Instance.VIDEO_STATE_PAUSED; + if (currentUserIsVideo && !isVideoCall) { + isVideoCall = true; + } } if (animated) { @@ -1134,7 +1167,7 @@ public 
class VoIPFragment implements VoIPBaseService.StateListener, Notification statusLayoutOffset = AndroidUtilities.dp(24); acceptDeclineView.setRetryMod(false); if (service != null && service.call.video) { - if (currentUserIsVideo) { + if (currentUserIsVideo && callingUser.photo != null) { showCallingAvatarMini = true; } else { showCallingAvatarMini = false; @@ -1177,6 +1210,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification updateKeyView(animated); } showTimer = true; + if (currentState == VoIPService.STATE_RECONNECTING) { + showReconnecting = true; + } break; case VoIPBaseService.STATE_ENDED: currentUserTextureView.saveCameraLastBitmap(); @@ -1192,9 +1228,42 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification final String message = LocaleController.formatString("VoipPeerIncompatible", R.string.VoipPeerIncompatible, name); showErrorDialog(AndroidUtilities.replaceTags(message)); } else if (TextUtils.equals(lastError, Instance.ERROR_PEER_OUTDATED)) { - final String name = ContactsController.formatName(callingUser.first_name, callingUser.last_name); - final String message = LocaleController.formatString("VoipPeerOutdated", R.string.VoipPeerOutdated, name); - showErrorDialog(AndroidUtilities.replaceTags(message)); + if (isVideoCall) { + final String name = UserObject.getFirstName(callingUser); + final String message = LocaleController.formatString("VoipPeerVideoOutdated", R.string.VoipPeerVideoOutdated, name); + boolean[] callAgain = new boolean[1]; + AlertDialog dlg = new DarkAlertDialog.Builder(activity) + .setTitle(LocaleController.getString("VoipFailed", R.string.VoipFailed)) + .setMessage(AndroidUtilities.replaceTags(message)) + .setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), (dialogInterface, i) -> windowView.finish()) + .setPositiveButton(LocaleController.getString("VoipPeerVideoOutdatedMakeVoice", R.string.VoipPeerVideoOutdatedMakeVoice), (dialogInterface, i) -> { + callAgain[0] = true; + currentState = VoIPService.STATE_BUSY; + Intent intent = new Intent(activity, VoIPService.class); + intent.putExtra("user_id", callingUser.id); + intent.putExtra("is_outgoing", true); + intent.putExtra("start_incall_activity", false); + intent.putExtra("video_call", false); + intent.putExtra("can_video_call", false); + intent.putExtra("account", UserConfig.selectedAccount); + try { + activity.startService(intent); + } catch (Throwable e) { + FileLog.e(e); + } + }) + .show(); + dlg.setCanceledOnTouchOutside(true); + dlg.setOnDismissListener(dialog -> { + if (!callAgain[0]) { + windowView.finish(); + } + }); + } else { + final String name = UserObject.getFirstName(callingUser); + final String message = LocaleController.formatString("VoipPeerOutdated", R.string.VoipPeerOutdated, name); + showErrorDialog(AndroidUtilities.replaceTags(message)); + } } else if (TextUtils.equals(lastError, Instance.ERROR_PRIVACY)) { final String name = ContactsController.formatName(callingUser.first_name, callingUser.last_name); final String message = LocaleController.formatString("CallNotAvailable", R.string.CallNotAvailable, name); @@ -1286,12 +1355,17 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification backIcon.setAlpha(lockOnScreen ? 0 : 1f); notificationsLayout.setTranslationY(-AndroidUtilities.dp(16) - (uiVisible ? 
AndroidUtilities.dp(80) : 0)); } - updateButtons(animated); + + if (currentState != VoIPService.STATE_HANGING_UP && currentState != VoIPService.STATE_ENDED) { + updateButtons(animated); + } if (showTimer) { statusTextView.showTimer(animated); } + statusTextView.showReconnect(showReconnecting, animated); + if (animated) { if (statusLayoutOffset != statusLayoutAnimateToOffset) { statusLayout.animate().translationY(statusLayoutOffset).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start(); @@ -1749,6 +1823,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification view.announceForAccessibility(text); } serviceInstance.setMicMute(micMute); + previousState = currentState; updateViewState(); } }); diff --git a/TMessagesProj/src/main/java/org/webrtc/GlShader.java b/TMessagesProj/src/main/java/org/webrtc/GlShader.java index bb1ffc558..19f395fbb 100644 --- a/TMessagesProj/src/main/java/org/webrtc/GlShader.java +++ b/TMessagesProj/src/main/java/org/webrtc/GlShader.java @@ -63,9 +63,9 @@ public class GlShader { // part of the program object." // But in practice, detaching shaders from the program seems to break some devices. Deleting the // shaders are fine however - it will delete them when they are no longer attached to a program. - GLES20.glDeleteShader(vertexShader); - GLES20.glDeleteShader(fragmentShader); - GlUtil.checkNoGLES2Error("Creating GlShader"); + //GLES20.glDeleteShader(vertexShader); delete crashes on xiaomi + //GLES20.glDeleteShader(fragmentShader); + //GlUtil.checkNoGLES2Error("Creating GlShader"); } public int getAttribLocation(String label) { diff --git a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java index 215598a85..a5563d653 100644 --- a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java +++ b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoDecoderFactory.java @@ -11,8 +11,10 @@ package org.webrtc; import android.media.MediaCodecInfo; + +import org.telegram.messenger.voip.Instance; + import androidx.annotation.Nullable; -import java.util.Arrays; /** Factory for Android hardware VideoDecoders. 
*/ public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory { @@ -20,7 +22,27 @@ public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory { new Predicate() { @Override public boolean test(MediaCodecInfo arg) { - return MediaCodecUtils.isHardwareAccelerated(arg); + if (!MediaCodecUtils.isHardwareAccelerated(arg)) { + return false; + } + String[] types = arg.getSupportedTypes(); + if (types == null || types.length == 0) { + return false; + } + Instance.ServerConfig config = Instance.getGlobalServerConfig(); + for (int a = 0; a < types.length; a++) { + switch (types[a]) { + case "video/x-vnd.on2.vp8": + return config.enable_vp8_decoder; + case "video/x-vnd.on2.vp9": + return config.enable_vp9_decoder; + case "video/avc": + return config.enable_h264_decoder; + case "video/hevc": + return config.enable_h265_decoder; + } + } + return true; } }; diff --git a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java index 92e100e60..8093aeece 100644 --- a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java +++ b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java @@ -18,12 +18,12 @@ import static org.webrtc.MediaCodecUtils.HISI_PREFIX; import android.media.MediaCodecInfo; import android.media.MediaCodecList; import android.os.Build; + +import org.telegram.messenger.voip.Instance; + import androidx.annotation.Nullable; import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashSet; import java.util.List; -import java.util.Set; /** Factory for android hardware video encoders. */ @SuppressWarnings("deprecation") // API 16 requires the use of deprecated methods. @@ -40,63 +40,6 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { private final boolean enableH264HighProfile; @Nullable private final Predicate codecAllowedPredicate; - private static final List H264_HW_EXCEPTION_MODELS = - Arrays.asList("samsung-sgh-i337", "nexus7", "nexus4", "pixel3xl", "pixel3"); - - private static final List VP8_HW_EXCEPTION_MODELS = - Arrays.asList("pixel3xl", "pixel3"); - - private static Set HW_EXCEPTION_MODELS = new HashSet() {{ - add("sm-a310f"); - add("sm-a310f/ds"); - add("sm-a310y"); - add("sm-a310m"); - add("sm-g920f"); - add("sm-g920fd"); - add("sm-g920fq"); - add("sm-g920i"); - add("sm-g920a"); - add("sm-g920t"); - add("sm-g930f"); - add("sm-g930fd"); - add("sm-g930w8"); - add("sm-g930s"); - add("sm-g930k"); - add("sm-g930l"); - add("sm-g935f"); - add("sm-g935fd"); - add("sm-g935w8"); - add("sm-g935s"); - add("sm-g935k"); - add("sm-g935l"); - - add("i537"); - add("sgh-i537"); - add("gt-i9295"); - add("sgh-i337"); - add("gt-i9505g"); - add("gt-i9505"); - add("gt-i9515"); - add("f240"); - add("e980"); - add("ls980"); - add("e988"); - add("e986"); - add("f240l"); - add("f240s"); - add("v9815"); - add("nx403a"); - add("f310l"); - add("f310lr"); - add("onem7"); - add("onemax"); - add("pn071"); - add("htc6500lvw"); - add("butterflys"); - add("mi2s"); - add("n1"); - }}; - /** * Creates a HardwareVideoEncoderFactory that supports surface texture encoding. * @@ -249,13 +192,9 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { // Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the // current SDK. - - private static String getModel() { - return Build.MODEL != null ? 
Build.MODEL.toLowerCase().replace(" ", "") : "nomodel"; - } - private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type) { - if (HW_EXCEPTION_MODELS.contains(getModel())) { + Instance.ServerConfig config = Instance.getGlobalServerConfig(); + if (!config.enable_h264_encoder && !config.enable_h265_encoder && !config.enable_vp8_encoder && !config.enable_vp9_encoder) { return false; } switch (type) { @@ -272,7 +211,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { } private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) { - if (VP8_HW_EXCEPTION_MODELS.contains(getModel())) { + if (!Instance.getGlobalServerConfig().enable_vp8_encoder) { return false; } String name = info.getName(); @@ -288,6 +227,9 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { } private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) { + if (!Instance.getGlobalServerConfig().enable_vp9_encoder) { + return false; + } String name = info.getName(); return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX) || name.startsWith(HISI_PREFIX)) // Both QCOM and Exynos VP9 encoders are supported in N or later. @@ -295,8 +237,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { } private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) { - // First, H264 hardware might perform poorly on this model. - if (H264_HW_EXCEPTION_MODELS.contains(getModel())) { + if (!Instance.getGlobalServerConfig().enable_h264_encoder) { return false; } String name = info.getName(); @@ -308,6 +249,9 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { } private boolean isHardwareSupportedInCurrentSdkH265(MediaCodecInfo info) { + if (!Instance.getGlobalServerConfig().enable_h265_encoder) { + return false; + } String name = info.getName(); // QCOM H265 encoder is supported in KITKAT or later. return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) diff --git a/TMessagesProj/src/main/res/drawable-hdpi/call_arrow_right.png b/TMessagesProj/src/main/res/drawable-hdpi/call_arrow_right.png new file mode 100644 index 0000000000000000000000000000000000000000..f7d24d9adc4cfc6d1b39a054d574a987bb0848d8 GIT binary patch literal 294 zcmV+>0oneEP)rK%-B1EXQ4L+K|#T87$|RH9jO8OZ0Swk6ZiEZGm_vr)|wpkd5Fyb*|z!Ezvn zk(!op0u4NlYy*(pLp2LP1_2Ee0peR|mgG^}5+$GoFOe+y3uFgT+Y%F?pk{LmdIuh+zbeuUml&=@@Ll6m(WjVlwrJ4DTvac*gp%6H)1*^$Q}yo(|9r$h9~ z?@R&4RSLHzKe1ZC_{3c~!1yO8=Zcw|mhhM~Y&zo-yNS%G{7PZ!4! z4%y@chNl1j|F3S$P!nKe+2_E>D#5PAqLg@wf%63C*5xxxDu8MkJYD@<);T3K0RSoY B7?}V7 literal 0 HcmV?d00001 diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/call_arrow_right.png b/TMessagesProj/src/main/res/drawable-xxhdpi/call_arrow_right.png new file mode 100644 index 0000000000000000000000000000000000000000..d936be51c940742896f32e5e66fb84ab7f2dcae2 GIT binary patch literal 516 zcmV+f0{i`mP)3{wkW4SqNt`Cuu81fX}r^uOj|6yUAto7`5HICLaFXx~(q=s6NL*{#Y- z05?sZIT8=FFDOgQ0(j*}T+`lEmY4_d-jTSVy`e0z0^pM)aZGz%Sz;Z)cSoWtD_~&2 zx5+O@qN6U+aRf}TtuE1ZB zxB|Khj&V(@{yrs-dkX;{yAvZ2cNhi!FB92T)wl3O*9SiCqXh7AALf9M`#24J+yxEr zahH_9$6nMU&?8HsyR8Cs+>I^Z<8Cbi-y*%u-TjGX!K!}*cYou haven\'t made any calls yet. **%1$s**\'s app is using an incompatible protocol. They need to update their app before you can call them. **%1$s**\'s app does not support calls. They need to update their app before you can call them. + Sorry, **%1$s** is using an old version of Telegram that doesn\'t support video calls. 
+    <string name="VoipPeerVideoOutdatedMakeVoice">Make a voice call</string>
     Please rate the quality of your Telegram call
     Telegram needs access to your microphone so that you can make calls.
     Telegram needs access to your microphone and camera so that you can make video calls.
@@ -3065,6 +3067,8 @@
     I couldn\'t hear the other side
     The other side couldn\'t hear me
     Call ended unexpectedly
+    <string name="RateCallVideoDistorted">Video was distorted</string>
+    <string name="RateCallVideoPixelated">Video was pixelated</string>
     Tap here to turn on your camera
     Unmute
     Mute
@@ -3676,4 +3680,5 @@
     \'Remind today at\' HH:mm
     \'Remind on\' MMM d \'at\' HH:mm
     \'Remind on\' MMM d yyyy \'at\' HH:mm
+    <string name="VoipReconnecting">Reconnecting</string>
 </resources>
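Note on the codec hunks above: both hardware factories drop the hard-coded per-device exception lists and instead consult flags from Instance.getGlobalServerConfig(). Below is a minimal sketch of that gating, under stated assumptions: the class name CodecGateSketch and the nested ServerConfig holder are illustrative stand-ins, while the enable_* field names and MIME type strings are the ones visible in the diff.

// Sketch only: mirrors the server-config codec gating added to
// HardwareVideoDecoderFactory / HardwareVideoEncoderFactory in this patch.
// ServerConfig here is a simplified stand-in for Instance.ServerConfig.
public final class CodecGateSketch {

    static final class ServerConfig {
        boolean enable_vp8_decoder = true;
        boolean enable_vp9_decoder = true;
        boolean enable_h264_decoder = true;
        boolean enable_h265_decoder = true;
    }

    // Decide whether a hardware decoder advertising the given MIME type may be used.
    static boolean isDecoderAllowed(ServerConfig config, String mimeType) {
        switch (mimeType) {
            case "video/x-vnd.on2.vp8":
                return config.enable_vp8_decoder;
            case "video/x-vnd.on2.vp9":
                return config.enable_vp9_decoder;
            case "video/avc":
                return config.enable_h264_decoder;
            case "video/hevc":
                return config.enable_h265_decoder;
            default:
                // Unrecognized types stay allowed, as in the patched predicate.
                return true;
        }
    }

    public static void main(String[] args) {
        ServerConfig config = new ServerConfig();
        config.enable_h265_decoder = false; // e.g. the server switches off HEVC decoding
        System.out.println(isDecoderAllowed(config, "video/avc"));  // true
        System.out.println(isDecoderAllowed(config, "video/hevc")); // false
    }
}

The practical effect is that a problematic codec can be disabled remotely for all devices at once, rather than maintaining in-app model blacklists such as the removed HW_EXCEPTION_MODELS, H264_HW_EXCEPTION_MODELS and VP8_HW_EXCEPTION_MODELS sets.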