1
0
mirror of https://github.com/MGislv/NekoX.git synced 2024-07-02 10:33:36 +00:00

Custom group voip audio bitrate

This commit is contained in:
luvletter2333 2021-08-21 04:23:27 +08:00
parent 94a130346c
commit 891a10e69c
No known key found for this signature in database
GPG Key ID: BFD68B892BECC1D8
8 changed files with 99 additions and 23 deletions

View File

@ -342,7 +342,7 @@ void initWebRTC(JNIEnv *env) {
FinalStateInitMethod = env->GetMethodID(FinalStateClass, "<init>", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V");
}
JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGroupNativeInstance(JNIEnv *env, jclass clazz, jobject instanceObj, jstring logFilePath, jboolean highQuality, jlong videoCapturer, jboolean screencast, jboolean noiseSupression) {
JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGroupNativeInstance(JNIEnv *env, jclass clazz, jobject instanceObj, jstring logFilePath, jboolean highQuality, jlong videoCapturer, jboolean screencast, jboolean noiseSupression, jshort customBitrate) {
initWebRTC(env);
std::shared_ptr<VideoCaptureInterface> videoCapture = videoCapturer ? std::shared_ptr<VideoCaptureInterface>(reinterpret_cast<VideoCaptureInterface *>(videoCapturer)) : nullptr;
@ -396,7 +396,8 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeGrou
.videoCapture = videoCapture,
.videoContentType = screencast ? VideoContentType::Screencast : VideoContentType::Generic,
.initialEnableNoiseSuppression = (bool) noiseSupression,
.platformContext = platformContext
.platformContext = platformContext,
.outgoingAudioBitrateKbit = customBitrate
};
if (!screencast) {
descriptor.requestBroadcastPart = [](std::shared_ptr<PlatformContext> platformContext, int64_t timestamp, int64_t duration, std::function<void(BroadcastPart &&)> callback) -> std::shared_ptr<BroadcastPartTask> {

View File

@ -1240,6 +1240,8 @@ std::function<webrtc::VideoTrackSourceInterface*()> videoCaptureToGetVideoSource
} // namespace
int GroupInstanceCustomImpl::customAudioBitrate = 0;
class GroupInstanceCustomInternal : public sigslot::has_slots<>, public std::enable_shared_from_this<GroupInstanceCustomInternal> {
public:
GroupInstanceCustomInternal(GroupInstanceDescriptor &&descriptor, std::shared_ptr<Threads> threads) :
@ -1266,8 +1268,11 @@ public:
_initialOutputDeviceId(std::move(descriptor.initialOutputDeviceId)),
_missingPacketBuffer(50),
_platformContext(descriptor.platformContext) {
RTC_LOG(LS_INFO) << "Init GroupInstanceCustomImpl with audio bitrate " << _outgoingAudioBitrateKbit << "kbps";
assert(_threads->getMediaThread()->IsCurrent());
GroupInstanceCustomImpl::customAudioBitrate = _outgoingAudioBitrateKbit * 1000;
_threads->getWorkerThread()->Invoke<void>(RTC_FROM_HERE, [this] {
_workerThreadSafery = webrtc::PendingTaskSafetyFlag::Create();
});
@ -1287,6 +1292,9 @@ public:
}
~GroupInstanceCustomInternal() {
RTC_LOG(LS_WARNING) << "~GroupInstanceCustomInternal, reset customAudioBitrate to zero";
GroupInstanceCustomImpl::customAudioBitrate = 0;
_incomingAudioChannels.clear();
_incomingVideoChannels.clear();
_serverBandwidthProbingVideoSsrc.reset();
@ -1312,15 +1320,16 @@ public:
void start() {
const auto weak = std::weak_ptr<GroupInstanceCustomInternal>(shared_from_this());
webrtc::field_trial::InitFieldTrialsFromString(
"WebRTC-Audio-Allocation/min:32kbps,max:32kbps/"
"WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/"
"WebRTC-TaskQueuePacer/Enabled/"
"WebRTC-VP8ConferenceTemporalLayers/1/"
"WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/"
//"WebRTC-MutedStateKillSwitch/Enabled/"
//"WebRTC-VP8IosMaxNumberOfThread/max_thread:1/"
);
std::ostringstream stringStream;
stringStream << "WebRTC-Audio-Allocation/min:" << _outgoingAudioBitrateKbit << "kbps,max:" << _outgoingAudioBitrateKbit << "kbps/"
<< "WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/"
<< "WebRTC-TaskQueuePacer/Enabled/"
<< "WebRTC-VP8ConferenceTemporalLayers/1/"
<< "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/";
// << "WebRTC-MutedStateKillSwitch/Enabled/"
// << "WebRTC-VP8IosMaxNumberOfThread/max_thread:1/"
webrtc::field_trial::InitFieldTrialsFromString(stringStream.str().c_str());
_networkManager.reset(new ThreadLocalObject<GroupNetworkManager>(_threads->getNetworkThread(), [weak, threads = _threads] () mutable {
return new GroupNetworkManager(
@ -1693,6 +1702,7 @@ public:
cricket::AudioOptions audioOptions;
if (_disableOutgoingAudioProcessing || _videoContentType == VideoContentType::Screencast) {
RTC_LOG(LS_ERROR) << "outgoing audio processing disabled";
audioOptions.echo_cancellation = false;
audioOptions.noise_suppression = false;
audioOptions.auto_gain_control = false;
@ -1713,10 +1723,10 @@ public:
_outgoingAudioChannel = _channelManager->CreateVoiceChannel(_call.get(), cricket::MediaConfig(), _rtpTransport, _threads->getWorkerThread(), "0", false, GroupNetworkManager::getDefaulCryptoOptions(), _uniqueRandomIdGenerator.get(), audioOptions);
const uint8_t opusMinBitrateKbps = _outgoingAudioBitrateKbit;
const uint8_t opusMaxBitrateKbps = _outgoingAudioBitrateKbit;
const uint8_t opusStartBitrateKbps = _outgoingAudioBitrateKbit;
const uint8_t opusPTimeMs = 120;
int opusMinBitrateKbps = _outgoingAudioBitrateKbit;
int opusMaxBitrateKbps = _outgoingAudioBitrateKbit;
int opusStartBitrateKbps = _outgoingAudioBitrateKbit;
const uint8_t opusPTimeMs = _outgoingAudioBitrateKbit == 32 ? 120 : 10;
cricket::AudioCodec opusCodec(111, "opus", 48000, 0, 2);
opusCodec.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc));
@ -2293,11 +2303,11 @@ public:
preferences.max_bitrate_bps = std::max(preferences.min_bitrate_bps, (1020 + 32) * 1000);
}
} else {
preferences.min_bitrate_bps = 32000;
preferences.min_bitrate_bps = _outgoingAudioBitrateKbit * 1024;
if (resetStartBitrate) {
preferences.start_bitrate_bps = 32000;
preferences.start_bitrate_bps = _outgoingAudioBitrateKbit * 1024;
}
preferences.max_bitrate_bps = 32000;
preferences.max_bitrate_bps = _outgoingAudioBitrateKbit * 1024;
}
settings.min_bitrate_bps = preferences.min_bitrate_bps;

View File

@ -18,6 +18,7 @@ class Threads;
class GroupInstanceCustomImpl final : public GroupInstanceInterface {
public:
static int customAudioBitrate;
explicit GroupInstanceCustomImpl(GroupInstanceDescriptor &&descriptor);
~GroupInstanceCustomImpl();

View File

@ -52,6 +52,7 @@
#include "rtc_base/third_party/base64/base64.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/metrics.h"
#include "../../../tgcalls/group/GroupInstanceCustomImpl.h"
#if WEBRTC_ENABLE_PROTOBUF
RTC_PUSH_IGNORING_WUNDEF()
@ -1105,6 +1106,11 @@ class WebRtcVoiceMediaChannel::WebRtcAudioSendStream
max_send_bitrate_bps_, rtp_parameters_.encodings[0].max_bitrate_bps,
*audio_codec_spec_);
if (tgcalls::GroupInstanceCustomImpl::customAudioBitrate != 0) {
config_.send_codec_spec->target_bitrate_bps = tgcalls::GroupInstanceCustomImpl::customAudioBitrate;
config_.max_bitrate_bps = tgcalls::GroupInstanceCustomImpl::customAudioBitrate;
config_.min_bitrate_bps = tgcalls::GroupInstanceCustomImpl::customAudioBitrate;
}
UpdateAllowedBitrateRange();
// Encoder will only use two channels if the stereo parameter is set.
@ -1831,7 +1837,7 @@ bool WebRtcVoiceMediaChannel::SetSendCodecs(
// "unchanged" so that BWE isn't affected.
bitrate_config.start_bitrate_bps = -1;
}
call_->GetTransportControllerSend()->SetSdpBitrateParameters(bitrate_config);
call_->GetTransportControllerSend()->SetSdpBitrateParameters(bitrate_config); // here
// Check if the transport cc feedback or NACK status has changed on the
// preferred send codec, and in that case reconfigure all receive streams.

View File

@ -11,6 +11,8 @@ import org.webrtc.VideoSink;
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import tw.nekomimi.nekogram.NekoConfig;
public class NativeInstance {
private Instance.OnStateUpdatedListener onStateUpdatedListener;
@ -63,6 +65,7 @@ public class NativeInstance {
}
public static NativeInstance makeGroup(String logPath, long videoCapturer, boolean screencast, boolean noiseSupression, PayloadCallback payloadCallback, AudioLevelsCallback audioLevelsCallback, VideoSourcesCallback unknownParticipantsCallback, RequestBroadcastPartCallback requestBroadcastPartCallback, RequestBroadcastPartCallback cancelRequestBroadcastPartCallback) {
// NekoX: Custom Audio Bitrate
ContextUtils.initialize(ApplicationLoader.applicationContext);
NativeInstance instance = new NativeInstance();
instance.payloadCallback = payloadCallback;
@ -71,7 +74,7 @@ public class NativeInstance {
instance.requestBroadcastPartCallback = requestBroadcastPartCallback;
instance.cancelRequestBroadcastPartCallback = cancelRequestBroadcastPartCallback;
instance.isGroup = true;
instance.nativePtr = makeGroupNativeInstance(instance, logPath, SharedConfig.disableVoiceAudioEffects, videoCapturer, screencast, noiseSupression);
instance.nativePtr = makeGroupNativeInstance(instance, logPath, SharedConfig.disableVoiceAudioEffects, videoCapturer, screencast, noiseSupression, NekoConfig.customAudioBitrate);
return instance;
}
@ -188,7 +191,7 @@ public class NativeInstance {
stopGroupNative();
}
private static native long makeGroupNativeInstance(NativeInstance instance, String persistentStateFilePath, boolean highQuality, long videoCapturer, boolean screencast, boolean noiseSupression);
private static native long makeGroupNativeInstance(NativeInstance instance, String persistentStateFilePath, boolean highQuality, long videoCapturer, boolean screencast, boolean noiseSupression, short customBitrate);
private static native long makeNativeInstance(String version, NativeInstance instance, Instance.Config config, String persistentStateFilePath, Instance.Endpoint[] endpoints, Instance.Proxy proxy, int networkType, Instance.EncryptionKey encryptionKey, VideoSink remoteSink, long videoCapturer, float aspectRatio);
public static native long createVideoCapturer(VideoSink localSink, int type);
public static native void setVideoStateCapturer(long videoCapturer, int videoState);

View File

@ -156,6 +156,8 @@ public class NekoConfig {
public static boolean enableStickerPin;
public static boolean useMediaStreamInVoip;
public static short customAudioBitrate = 320;
public static boolean disableGroupVoipAudioProcessing = false;
public static String getOpenPGPAppName() {
@ -326,6 +328,8 @@ public class NekoConfig {
enableStickerPin = preferences.getBoolean("enableStickerPin", false);
useMediaStreamInVoip = preferences.getBoolean("useMediaStreamInVoip", false);
customAudioBitrate = (short) preferences.getInt("customAudioBitrate", 32);
disableGroupVoipAudioProcessing = preferences.getBoolean("disableGroupVoipAudioProcessing", false);
}
@ -835,6 +839,14 @@ public class NekoConfig {
preferences.edit().putBoolean("useMediaStreamInVoip", useMediaStreamInVoip = !useMediaStreamInVoip).apply();
}
/**
 * Persists the custom group-voice-chat audio bitrate (in kbps).
 * The value is narrowed to {@code short} because the backing field and the
 * JNI entry point ({@code makeGroupNativeInstance}) both use {@code short}.
 *
 * @param bitrate bitrate in kbps (UI offers 32..320)
 */
public static void setCustomAudioBitrate(int bitrate) {
    customAudioBitrate = (short) bitrate;
    preferences.edit()
            .putInt("customAudioBitrate", customAudioBitrate)
            .apply();
}
/**
 * Flips the "disable group VoIP audio processing" flag and persists
 * the new value to shared preferences.
 */
public static void toggleDisableGroupVoipAudioProcessing() {
    disableGroupVoipAudioProcessing = !disableGroupVoipAudioProcessing;
    preferences.edit()
            .putBoolean("disableGroupVoipAudioProcessing", disableGroupVoipAudioProcessing)
            .apply();
}
private static final String EMOJI_FONT_AOSP = "NotoColorEmoji.ttf";
public static Typeface getSystemEmojiTypeface() {

View File

@ -40,8 +40,10 @@ import org.telegram.ui.Components.UndoView;
import java.util.ArrayList;
import kotlin.Unit;
import tw.nekomimi.nekogram.NekoConfig;
import tw.nekomimi.nekogram.NekoXConfig;
import tw.nekomimi.nekogram.PopupBuilder;
@SuppressLint("RtlHardcoded")
public class NekoExperimentalSettingsActivity extends BaseFragment {
@ -66,6 +68,7 @@ public class NekoExperimentalSettingsActivity extends BaseFragment {
private int unlimitedPinnedDialogsRow;
private int enableStickerPinRow;
private int useMediaStreamInVoipRow;
private int customAudioBitrateRow;
private int experiment2Row;
private UndoView tooltip;
@ -181,11 +184,33 @@ public class NekoExperimentalSettingsActivity extends BaseFragment {
tooltip.setInfoText(AndroidUtilities.replaceTags(LocaleController.formatString("EnableStickerPinTip", R.string.EnableStickerPinTip)));
tooltip.showWithAction(0, UndoView.ACTION_CACHE_WAS_CLEARED, null, null);
}
} else if (position == useMediaStreamInVoipRow){
} else if (position == useMediaStreamInVoipRow) {
NekoConfig.toggleUseMediaStreamInVoip();
if (view instanceof TextCheckCell) {
((TextCheckCell) view).setChecked(NekoConfig.useMediaStreamInVoip);
}
} else if (position == customAudioBitrateRow) {
PopupBuilder builder = new PopupBuilder(view);
builder.setItems(new String[]{
"32 (" + LocaleController.getString("Default", R.string.Default) + ")",
"64",
"128",
"192",
"256",
"320"
}, (i, __)->{
switch (i) {
case 0 : NekoConfig.setCustomAudioBitrate(32); break;
case 1 : NekoConfig.setCustomAudioBitrate(64); break;
case 2 : NekoConfig.setCustomAudioBitrate(128); break;
case 3 : NekoConfig.setCustomAudioBitrate(192); break;
case 4 : NekoConfig.setCustomAudioBitrate(256); break;
case 5 : NekoConfig.setCustomAudioBitrate(320); break;
}
listAdapter.notifyItemChanged(customAudioBitrateRow);
return Unit.INSTANCE;
});
builder.show();
}
});
@ -218,6 +243,7 @@ public class NekoExperimentalSettingsActivity extends BaseFragment {
unlimitedPinnedDialogsRow = rowCount++;
enableStickerPinRow = rowCount++;
useMediaStreamInVoipRow = rowCount++;
customAudioBitrateRow = rowCount++;
experiment2Row = rowCount++;
if (listAdapter != null) {
listAdapter.notifyDataSetChanged();
@ -334,6 +360,16 @@ public class NekoExperimentalSettingsActivity extends BaseFragment {
}
break;
}
case 2: {
TextSettingsCell textCell = (TextSettingsCell) holder.itemView;
textCell.setTextColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlackText));
if (position == customAudioBitrateRow) {
String value = String.valueOf(NekoConfig.customAudioBitrate) + "kbps";
if(NekoConfig.customAudioBitrate==32) value += " (" + LocaleController.getString("Default", R.string.Default) + ")";
textCell.setTextAndValue(LocaleController.getString("customGroupVoipAudioBitrate", R.string.customGroupVoipAudioBitrate), value, true);
}
break;
}
case 3: {
TextCheckCell textCell = (TextCheckCell) holder.itemView;
textCell.setEnabled(true, null);
@ -355,7 +391,7 @@ public class NekoExperimentalSettingsActivity extends BaseFragment {
} else if (position == increaseVoiceMessageQualityRow) {
textCell.setTextAndCheck(LocaleController.getString("IncreaseVoiceMessageQuality", R.string.IncreaseVoiceMessageQuality), NekoConfig.increaseVoiceMessageQuality, true);
} else if (position == enableStickerPinRow) {
textCell.setTextAndValueAndCheck(LocaleController.getString("EnableStickerPin", R.string.EnableStickerPin), LocaleController.getString("EnableStickerPinAbout", R.string.EnableStickerPinAbout), NekoConfig.enableStickerPin, true,true);
textCell.setTextAndValueAndCheck(LocaleController.getString("EnableStickerPin", R.string.EnableStickerPin), LocaleController.getString("EnableStickerPinAbout", R.string.EnableStickerPinAbout), NekoConfig.enableStickerPin, true, true);
} else if (position == useMediaStreamInVoipRow) {
textCell.setTextAndCheck(LocaleController.getString("UseMediaStreamInVoip", R.string.UseMediaStreamInVoip), NekoConfig.useMediaStreamInVoip, true);
}
@ -384,6 +420,10 @@ public class NekoExperimentalSettingsActivity extends BaseFragment {
case 1:
view = new ShadowSectionCell(mContext);
break;
case 2:
view = new TextSettingsCell(mContext);
view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite));
break;
case 3:
view = new TextCheckCell(mContext);
view.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite));
@ -416,6 +456,8 @@ public class NekoExperimentalSettingsActivity extends BaseFragment {
return 1;
} else if (position == experimentRow) {
return 4;
} else if (position == customAudioBitrateRow) {
return 2;
}
return 3;
}

View File

@ -116,5 +116,6 @@
<string name="HideTimeForSticker">Hide time for stickers</string>
<string name="useSystemDNS">Use system DNS</string>
<string name="customDoH">Custom DoH</string>
<string name="customGroupVoipAudioBitrate">Group Voip Audio Bitrate</string>
</resources>