Mirror of https://github.com/DrKLO/Telegram.git

Update to 7.0.0 (2064)

commit 4c5f32babf
parent 6e495f54b8

67 changed files with 2447 additions and 1283 deletions
@@ -280,7 +280,7 @@ android {
         }
     }
 
-    defaultConfig.versionCode = 2061
+    defaultConfig.versionCode = 2064
 
     applicationVariants.all { variant ->
         variant.outputs.all { output ->
@@ -71,6 +71,10 @@ struct InstanceHolder {
     std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
 };
 
+jclass TrafficStatsClass;
+jclass FinalStateClass;
+jmethodID FinalStateInitMethod;
+
 jlong getInstanceHolderId(JNIEnv *env, jobject obj) {
     return env->GetLongField(obj, env->GetFieldID(env->GetObjectClass(obj), "nativePtr", "J"));
 }
@@ -200,9 +204,8 @@ jint asJavaState(const State &state) {
 }
 
 jobject asJavaTrafficStats(JNIEnv *env, const TrafficStats &trafficStats) {
-    jclass clazz = env->FindClass("org/telegram/messenger/voip/Instance$TrafficStats");
-    jmethodID initMethodId = env->GetMethodID(clazz, "<init>", "(JJJJ)V");
-    return env->NewObject(clazz, initMethodId, (jlong) trafficStats.bytesSentWifi, (jlong) trafficStats.bytesReceivedWifi, (jlong) trafficStats.bytesSentMobile, (jlong) trafficStats.bytesReceivedMobile);
+    jmethodID initMethodId = env->GetMethodID(TrafficStatsClass, "<init>", "(JJJJ)V");
+    return env->NewObject(TrafficStatsClass, initMethodId, (jlong) trafficStats.bytesSentWifi, (jlong) trafficStats.bytesReceivedWifi, (jlong) trafficStats.bytesSentMobile, (jlong) trafficStats.bytesReceivedMobile);
 }
 
 jobject asJavaFinalState(JNIEnv *env, const FinalState &finalState) {
@@ -210,9 +213,7 @@ jobject asJavaFinalState(JNIEnv *env, const FinalState &finalState) {
     jstring debugLog = env->NewStringUTF(finalState.debugLog.c_str());
     jobject trafficStats = asJavaTrafficStats(env, finalState.trafficStats);
     auto isRatingSuggested = static_cast<jboolean>(finalState.isRatingSuggested);
-    jclass finalStateClass = env->FindClass("org/telegram/messenger/voip/Instance$FinalState");
-    jmethodID finalStateInitMethodId = env->GetMethodID(finalStateClass, "<init>", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V");
-    return env->NewObject(finalStateClass, finalStateInitMethodId, persistentState, debugLog, trafficStats, isRatingSuggested);
+    return env->NewObject(FinalStateClass, FinalStateInitMethod, persistentState, debugLog, trafficStats, isRatingSuggested);
 }
 
 extern "C" {
@@ -229,6 +230,10 @@ void initWebRTC(JNIEnv *env) {
     webrtc::JVM::Initialize(vm);
     rtc::InitializeSSL();
     webrtcLoaded = true;
+
+    TrafficStatsClass = static_cast<jclass>(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$TrafficStats")));
+    FinalStateClass = static_cast<jclass>(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$FinalState")));
+    FinalStateInitMethod = env->GetMethodID(FinalStateClass, "<init>", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V");
 }
 
 JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNativeInstance(JNIEnv *env, jclass clazz, jstring version, jobject instanceObj, jobject config, jstring persistentStateFilePath, jobjectArray endpoints, jobject proxyClass, jint networkType, jobject encryptionKey, jobject remoteSink, jlong videoCapturer, jfloat aspectRatio) {
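
A note on the handles cached in initWebRTC above: FindClass returns a local reference that dies when the native frame unwinds, and on threads attached from native code (such as the tgcalls thread that now delivers FinalState) it may fail to resolve application classes, so the commit stores NewGlobalRef'd classes, plus a jmethodID, which needs no reference management, once at init time. A minimal sketch of the same pattern; the class name and function names here are hypothetical:

    #include <jni.h>

    // Cached handles; valid for the lifetime of the process once initialized.
    static jclass gFooClass = nullptr;
    static jmethodID gFooCtor = nullptr;

    // Call once from a thread that can see the application class loader,
    // e.g. from JNI_OnLoad or an explicit init entry point.
    void cacheClasses(JNIEnv *env) {
        jclass local = env->FindClass("org/example/Foo"); // hypothetical class
        gFooClass = static_cast<jclass>(env->NewGlobalRef(local));
        env->DeleteLocalRef(local); // the local ref is no longer needed
        // jmethodID values are not references, so no global ref is required.
        gFooCtor = env->GetMethodID(gFooClass, "<init>", "()V");
    }

    // Safe to call later from any attached thread.
    jobject makeFoo(JNIEnv *env) {
        return env->NewObject(gFooClass, gFooCtor);
    }
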
@@ -259,7 +264,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
     .enableVolumeControl = true,
     .logPath = tgvoip::jni::JavaStringToStdString(env, configObject.getStringField("logPath")),
     .maxApiLayer = configObject.getIntField("maxApiLayer"),
-    /*.preferredAspectRatio = aspectRatio*/
+    .preferredAspectRatio = aspectRatio
 },
 .encryptionKey = EncryptionKey(
     std::move(encryptionKeyValue),
@@ -332,6 +337,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
     holder->javaInstance = globalRef;
     holder->_videoCapture = videoCapture;
     holder->nativeInstance->setIncomingVideoOutput(webrtc::JavaToNativeVideoSink(env, remoteSink));
+    holder->nativeInstance->setNetworkType(parseNetworkType(networkType));
     return reinterpret_cast<jlong>(holder);
 }
 
@@ -384,19 +390,16 @@ JNIEXPORT jbyteArray JNICALL Java_org_telegram_messenger_voip_NativeInstance_get
     return copyVectorToJavaByteArray(env, getInstance(env, obj)->getPersistentState().value);
 }
 
-JNIEXPORT jobject JNICALL Java_org_telegram_messenger_voip_NativeInstance_stop(JNIEnv *env, jobject obj) {
+JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_stopNative(JNIEnv *env, jobject obj) {
     InstanceHolder *instance = getInstanceHolder(env, obj);
-    FinalState finalState = instance->nativeInstance->stop();
-    // saving persistent state
-    const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, obj).getStringField("persistentStateFilePath"));
-    savePersistentState(path.c_str(), finalState.persistentState);
-    // clean
-    env->DeleteGlobalRef(instance->javaInstance);
-    delete instance;
-    return asJavaFinalState(env, finalState);
+    instance->nativeInstance->stop([instance](FinalState finalState) {
+        JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
+        const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, instance->javaInstance).getStringField("persistentStateFilePath"));
+        savePersistentState(path.c_str(), finalState.persistentState);
+        env->CallVoidMethod(instance->javaInstance, env->GetMethodID(env->GetObjectClass(instance->javaInstance), "onStop", "(Lorg/telegram/messenger/voip/Instance$FinalState;)V"), asJavaFinalState(env, finalState));
+        env->DeleteGlobalRef(instance->javaInstance);
+        delete instance;
+    });
 }
 
 JNIEXPORT long JNICALL Java_org_telegram_messenger_voip_NativeInstance_createVideoCapturer(JNIEnv *env, jclass clazz, jobject localSink) {
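
The stop path above changes shape: instead of returning a FinalState synchronously, stop() now takes a completion that tgcalls invokes on one of its own threads, which is why the JNI callback first calls webrtc::AttachCurrentThreadIfNeeded() and why the holder is deleted only inside the callback. A reduced model of that hand-off, with hypothetical stand-in types and a plain std::thread in place of the tgcalls thread:

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <string>
    #include <thread>

    // Hypothetical stand-in for tgcalls::FinalState.
    struct FinalState {
        std::string debugLog;
    };

    // An instance whose stop() delivers its result on a background thread,
    // mirroring the shape of Instance::stop(std::function<void(FinalState)>).
    class Instance {
    public:
        void stop(std::function<void(FinalState)> completion) {
            std::thread([completion = std::move(completion)]() {
                FinalState state;
                state.debugLog = "collected on the worker thread";
                completion(state); // owner cleans up here, not before
            }).detach();
        }
    };

    int main() {
        Instance instance;
        instance.stop([](FinalState state) {
            std::cout << state.debugLog << std::endl;
        });
        std::this_thread::sleep_for(std::chrono::seconds(1)); // crude wait for the demo
    }
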
@@ -23,7 +23,7 @@ bool CompareFormats(const VideoFormat &a, const VideoFormat &b) {
     }
 }
 
-int FormatPriority(const VideoFormat &format) {
+int FormatPriority(const VideoFormat &format, const std::vector<std::string> &preferredCodecs) {
     static const auto kCodecs = {
         std::string(cricket::kAv1CodecName),
         std::string(cricket::kVp9CodecName),
@@ -44,7 +44,15 @@ int FormatPriority(const VideoFormat &format) {
         return result;
     }();
 
-    auto result = 0;
+    for (int i = 0; i < preferredCodecs.size(); i++) {
+        for (const auto &name : kSupported) {
+            if (absl::EqualsIgnoreCase(format.name, preferredCodecs[i]) && absl::EqualsIgnoreCase(format.name, name)) {
+                return i;
+            }
+        }
+    }
+
+    auto result = (int)preferredCodecs.size();
     for (const auto &name : kSupported) {
         if (absl::EqualsIgnoreCase(format.name, name)) {
             return result;
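
The new FormatPriority contract: a format that is both supported and listed in preferredCodecs gets its index in that list (so preferred codecs sort first, in the caller's order), other supported formats sort after all preferred ones, and unsupported formats return -1 and are later erased. A simplified, dependency-free model of the same ordering; function and variable names here are hypothetical:

    #include <algorithm>
    #include <cstdio>
    #include <string>
    #include <vector>

    // Simplified model of FormatPriority: lower value sorts earlier,
    // -1 marks unsupported formats (erased by the caller).
    int priority(const std::string &name,
                 const std::vector<std::string> &supported,
                 const std::vector<std::string> &preferred) {
        for (size_t i = 0; i < preferred.size(); i++) {
            if (name == preferred[i] &&
                std::find(supported.begin(), supported.end(), name) != supported.end()) {
                return (int)i; // preferred codecs win, in their listed order
            }
        }
        int result = (int)preferred.size();
        for (const auto &s : supported) {
            if (name == s) {
                return result; // supported but not preferred: after all preferred
            }
            ++result;
        }
        return -1; // unsupported
    }

    int main() {
        std::vector<std::string> supported = {"AV1", "VP9", "VP8", "H264"};
        std::vector<std::string> preferred = {"H264"};
        std::vector<std::string> encoders = {"VP8", "H264", "AV1"};
        std::sort(encoders.begin(), encoders.end(),
                  [&](const std::string &a, const std::string &b) {
                      return priority(a, supported, preferred) < priority(b, supported, preferred);
                  });
        for (const auto &e : encoders) {
            std::printf("%s\n", e.c_str()); // H264, AV1, VP8
        }
    }
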
@@ -54,17 +62,19 @@ int FormatPriority(const VideoFormat &format) {
     return -1;
 }
 
-bool ComparePriorities(const VideoFormat &a, const VideoFormat &b) {
-    return FormatPriority(a) < FormatPriority(b);
+bool ComparePriorities(const VideoFormat &a, const VideoFormat &b, const std::vector<std::string> &preferredCodecs) {
+    return FormatPriority(a, preferredCodecs) < FormatPriority(b, preferredCodecs);
 }
 
-std::vector<VideoFormat> FilterAndSortEncoders(std::vector<VideoFormat> list) {
+std::vector<VideoFormat> FilterAndSortEncoders(std::vector<VideoFormat> list, const std::vector<std::string> &preferredCodecs) {
     const auto listBegin = begin(list);
     const auto listEnd = end(list);
-    std::sort(listBegin, listEnd, ComparePriorities);
+    std::sort(listBegin, listEnd, [&preferredCodecs](const VideoFormat &lhs, const VideoFormat &rhs) {
+        return ComparePriorities(lhs, rhs, preferredCodecs);
+    });
     auto eraseFrom = listBegin;
     auto eraseTill = eraseFrom;
-    while (eraseTill != listEnd && FormatPriority(*eraseTill) == -1) {
+    while (eraseTill != listEnd && FormatPriority(*eraseTill, preferredCodecs) == -1) {
         ++eraseTill;
     }
     if (eraseTill != eraseFrom) {
@@ -131,11 +141,12 @@ void AddDefaultFeedbackParams(cricket::VideoCodec *codec) {
 
 VideoFormatsMessage ComposeSupportedFormats(
     std::vector<VideoFormat> encoders,
-    std::vector<VideoFormat> decoders) {
-    encoders = FilterAndSortEncoders(std::move(encoders));
+    std::vector<VideoFormat> decoders,
+    const std::vector<std::string> &preferredCodecs) {
+    encoders = FilterAndSortEncoders(std::move(encoders), preferredCodecs);
 
     auto result = VideoFormatsMessage();
-    result.encodersCount = encoders.size();
+    result.encodersCount = (int)encoders.size();
     result.formats = AppendUnique(std::move(encoders), std::move(decoders));
     for (const auto &format : result.formats) {
         RTC_LOG(LS_INFO) << "Format: " << format.ToString();
@@ -18,7 +18,8 @@ struct CommonCodecs {
 
 VideoFormatsMessage ComposeSupportedFormats(
     std::vector<webrtc::SdpVideoFormat> encoders,
-    std::vector<webrtc::SdpVideoFormat> decoders);
+    std::vector<webrtc::SdpVideoFormat> decoders,
+    const std::vector<std::string> &preferredCodecs);
 
 CommonFormats ComputeCommonFormats(
     const VideoFormatsMessage &my,
@@ -1,5 +1,7 @@
 #include "CryptoHelper.h"
 
+#include <cstring>
+
 namespace tgcalls {
 
 AesKeyIv PrepareAesKeyIv(const uint8_t *key, const uint8_t *msgKey, int x) {
@@ -58,6 +58,16 @@ absl::nullopt_t LogError(
     return absl::nullopt;
 }
 
+bool ConstTimeIsDifferent(const void *a, const void *b, size_t size) {
+    auto ca = reinterpret_cast<const char*>(a);
+    auto cb = reinterpret_cast<const char*>(b);
+    volatile auto different = false;
+    for (const auto ce = ca + size; ca != ce; ++ca, ++cb) {
+        different |= (*ca != *cb);
+    }
+    return different;
+}
+
 } // namespace
 
 EncryptedConnection::EncryptedConnection(
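
ConstTimeIsDifferent above exists because memcmp may return at the first mismatching byte, so comparison time can leak how much of a message key an attacker has guessed; OR-accumulating every byte removes that data-dependent early exit. A standalone sketch of the same idea:

    #include <cstddef>
    #include <cstdio>

    // Standalone version of the accumulate-then-test comparison:
    // runtime depends only on size, not on where the buffers differ.
    bool constTimeIsDifferent(const void *a, const void *b, size_t size) {
        auto ca = static_cast<const unsigned char *>(a);
        auto cb = static_cast<const unsigned char *>(b);
        unsigned char diff = 0;
        for (size_t i = 0; i < size; i++) {
            diff |= (unsigned char)(ca[i] ^ cb[i]); // no early exit
        }
        return diff != 0;
    }

    int main() {
        unsigned char x[16] = {0};
        unsigned char y[16] = {0};
        y[15] = 1;
        std::printf("%d\n", constTimeIsDifferent(x, y, sizeof(x))); // 1
    }
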
@@ -326,7 +336,7 @@ auto EncryptedConnection::handleIncomingPacket(const char *bytes, size_t size)
     const auto msgKeyLarge = ConcatSHA256(
         MemorySpan{ key + 88 + x, 32 },
         MemorySpan{ decryptionBuffer.data(), decryptionBuffer.size() });
-    if (memcmp(msgKeyLarge.data() + 8, msgKey, 16)) {
+    if (ConstTimeIsDifferent(msgKeyLarge.data() + 8, msgKey, 16)) {
         return LogError("Bad incoming data hash.");
     }
 
@@ -364,10 +374,16 @@ auto EncryptedConnection::processPacket(
     }
 
     if (type == kEmptyId) {
+        if (additionalMessage) {
+            return LogError("Empty message should be only the first one in the packet.");
+        }
         RTC_LOG(LS_INFO) << logHeader()
             << "Got RECV:empty" << "#" << currentCounter;
         reader.Consume(1);
     } else if (type == kAckId) {
+        if (!additionalMessage) {
+            return LogError("Ack message must not be the first one in the packet.");
+        }
         ackMyMessage(currentSeq);
         reader.Consume(1);
     } else if (auto message = DeserializeMessage(reader, singleMessagePacket)) {
@@ -1,7 +1,5 @@
 #include "Instance.h"
 
-#include "VideoCaptureInterfaceImpl.h"
-
 #include <algorithm>
 #include <stdarg.h>
 
@@ -10,8 +8,8 @@ namespace {
 
 std::function<void(std::string const &)> globalLoggingFunction;
 
-std::map<std::string, std::unique_ptr<Meta>> &MetaMap() {
-    static auto result = std::map<std::string, std::unique_ptr<Meta>>();
+std::map<std::string, std::shared_ptr<Meta>> &MetaMap() {
+    static auto result = std::map<std::string, std::shared_ptr<Meta>>();
     return result;
 }
 
@@ -44,10 +42,12 @@ std::unique_ptr<Instance> Meta::Create(
         : nullptr;
 }
 
-void Meta::RegisterOne(std::unique_ptr<Meta> meta) {
+void Meta::RegisterOne(std::shared_ptr<Meta> meta) {
     if (meta) {
-        const auto version = meta->version();
-        MetaMap().emplace(version, std::move(meta));
+        const auto versions = meta->versions();
+        for (auto &it : versions) {
+            MetaMap().emplace(it, meta);
+        }
     }
 }
 
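
With Meta now held in a shared_ptr, one implementation can be registered under every protocol version string it supports, and lookup by any of those strings reaches the same object. A minimal sketch of such a multi-key registry; the types here are hypothetical:

    #include <cstdio>
    #include <map>
    #include <memory>
    #include <string>
    #include <vector>

    // One implementation object, reachable under several version keys.
    struct Impl {
        const char *name = "tgcalls";
    };

    int main() {
        std::map<std::string, std::shared_ptr<Impl>> registry;
        auto impl = std::make_shared<Impl>();
        for (const auto &version : std::vector<std::string>{"2.7.7", "3.0.0"}) {
            registry.emplace(version, impl); // shared, not moved: both keys stay valid
        }
        std::printf("%d\n", registry.at("2.7.7") == registry.at("3.0.0")); // 1
    }
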
@@ -55,6 +55,11 @@ struct Endpoint {
     unsigned char peerTag[16] = { 0 };
 };
 
+enum class ProtocolVersion {
+    V0,
+    V1 // Low-cost network negotiation
+};
+
 enum class NetworkType {
     Unknown,
     Gprs,
@@ -98,6 +103,8 @@ struct Config {
     int maxApiLayer = 0;
     float preferredAspectRatio;
     bool enableHighBitrateVideo = false;
+    std::vector<std::string> preferredVideoCodecs;
+    ProtocolVersion protocolVersion = ProtocolVersion::V0;
 };
 
 struct EncryptionKey {
@@ -174,7 +181,7 @@ public:
     virtual void receiveSignalingData(const std::vector<uint8_t> &data) = 0;
     virtual void setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture) = 0;
 
-    virtual FinalState stop() = 0;
+    virtual void stop(std::function<void(FinalState)> completion) = 0;
 
 };
 
@@ -204,7 +211,7 @@ public:
 
     virtual std::unique_ptr<Instance> construct(Descriptor &&descriptor) = 0;
     virtual int connectionMaxLayer() = 0;
-    virtual std::string version() = 0;
+    virtual std::vector<std::string> versions() = 0;
 
     static std::unique_ptr<Instance> Create(
         const std::string &version,
@@ -218,7 +225,7 @@ private:
 
     template <typename Implementation>
     static bool RegisterOne();
-    static void RegisterOne(std::unique_ptr<Meta> meta);
+    static void RegisterOne(std::shared_ptr<Meta> meta);
 
 };
 
@@ -229,14 +236,14 @@ bool Meta::RegisterOne() {
     int connectionMaxLayer() override {
         return Implementation::GetConnectionMaxLayer();
     }
-    std::string version() override {
-        return Implementation::GetVersion();
+    std::vector<std::string> versions() override {
+        return Implementation::GetVersions();
     }
     std::unique_ptr<Instance> construct(Descriptor &&descriptor) override {
         return std::make_unique<Implementation>(std::move(descriptor));
     }
 };
-RegisterOne(std::make_unique<MetaImpl>());
+RegisterOne(std::make_shared<MetaImpl>());
 return true;
 }
 
@@ -30,12 +30,16 @@ InstanceImpl::InstanceImpl(Descriptor &&descriptor)
     rtc::LogMessage::SetLogToStderr(false);
     rtc::LogMessage::AddLogToStream(_logSink.get(), rtc::LS_INFO);
 
+    auto networkType = descriptor.initialNetworkType;
+
     _manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [descriptor = std::move(descriptor)]() mutable {
         return new Manager(getManagerThread(), std::move(descriptor));
     }));
     _manager->perform(RTC_FROM_HERE, [](Manager *manager) {
         manager->start();
     });
+
+    setNetworkType(networkType);
 }
 
 InstanceImpl::~InstanceImpl() {
@@ -55,51 +59,19 @@ void InstanceImpl::setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoC
 }
 
 void InstanceImpl::setNetworkType(NetworkType networkType) {
-    /*message::NetworkType mappedType;
+    bool isLowCostNetwork = false;
 
     switch (networkType) {
-    case NetworkType::Unknown:
-        mappedType = message::NetworkType::nUnknown;
-        break;
-    case NetworkType::Gprs:
-        mappedType = message::NetworkType::nGprs;
-        break;
-    case NetworkType::Edge:
-        mappedType = message::NetworkType::nEdge;
-        break;
-    case NetworkType::ThirdGeneration:
-        mappedType = message::NetworkType::n3gOrAbove;
-        break;
-    case NetworkType::Hspa:
-        mappedType = message::NetworkType::n3gOrAbove;
-        break;
-    case NetworkType::Lte:
-        mappedType = message::NetworkType::n3gOrAbove;
-        break;
     case NetworkType::WiFi:
-        mappedType = message::NetworkType::nHighSpeed;
-        break;
     case NetworkType::Ethernet:
-        mappedType = message::NetworkType::nHighSpeed;
+        isLowCostNetwork = true;
         break;
-    case NetworkType::OtherHighSpeed:
-        mappedType = message::NetworkType::nHighSpeed;
-        break;
-    case NetworkType::OtherLowSpeed:
-        mappedType = message::NetworkType::nEdge;
-        break;
-    case NetworkType::OtherMobile:
-        mappedType = message::NetworkType::n3gOrAbove;
-        break;
-    case NetworkType::Dialup:
-        mappedType = message::NetworkType::nGprs;
-        break;
     default:
-        mappedType = message::NetworkType::nUnknown;
         break;
     }
 
-    controller_->SetNetworkType(mappedType);*/
+    _manager->perform(RTC_FROM_HERE, [isLowCostNetwork](Manager *manager) {
+        manager->setIsLocalNetworkLowCost(isLowCostNetwork);
+    });
 }
 
 void InstanceImpl::setMuteMicrophone(bool muteMicrophone) {
@@ -166,12 +138,19 @@ PersistentState InstanceImpl::getPersistentState() {
     return PersistentState{}; // we dont't have such information
 }
 
-FinalState InstanceImpl::stop() {
-    FinalState finalState;
-    finalState.debugLog = _logSink->result();
-    finalState.isRatingSuggested = false;
-
-    return finalState;
+void InstanceImpl::stop(std::function<void(FinalState)> completion) {
+    std::string debugLog = _logSink->result();
+
+    _manager->perform(RTC_FROM_HERE, [completion, debugLog = std::move(debugLog)](Manager *manager) {
+        manager->getNetworkStats([completion, debugLog = std::move(debugLog)](TrafficStats stats) {
+            FinalState finalState;
+            finalState.debugLog = debugLog;
+            finalState.isRatingSuggested = false;
+            finalState.trafficStats = stats;
+
+            completion(finalState);
+        });
+    });
 }
 
 /*void InstanceImpl::controllerStateCallback(Controller::State state) {
@@ -201,8 +180,11 @@ int InstanceImpl::GetConnectionMaxLayer() {
     return 92; // TODO: retrieve from LayerBase
 }
 
-std::string InstanceImpl::GetVersion() {
-    return "2.7.7"; // TODO: version not known while not released
+std::vector<std::string> InstanceImpl::GetVersions() {
+    std::vector<std::string> result;
+    result.push_back("2.7.7");
+    result.push_back("3.0.0");
+    return result;
 }
 
 template <>
@@ -17,7 +17,7 @@ public:
     ~InstanceImpl() override;
 
     static int GetConnectionMaxLayer();
-    static std::string GetVersion();
+    static std::vector<std::string> GetVersions();
 
     void receiveSignalingData(const std::vector<uint8_t> &data) override;
     void setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture) override;
@@ -37,7 +37,7 @@ public:
     int64_t getPreferredRelayId() override;
     TrafficStats getTrafficStats() override;
     PersistentState getPersistentState() override;
-    FinalState stop() override;
+    void stop(std::function<void(FinalState)> completion) override;
     //void controllerStateCallback(Controller::State state);
 
 private:
@@ -5,9 +5,9 @@
 #ifdef WEBRTC_WIN
 #include "windows.h"
 #include <ctime>
-#elif defined(WEBRTC_IOS) || defined(WEBRTC_MAC)
+#else // WEBRTC_WIN
 #include <sys/time.h>
-#endif //WEBRTC_IOS || WEBRTC_MAC
+#endif // WEBRTC_WIN
 
 namespace tgcalls {
 
@@ -39,6 +39,7 @@ _signaling(
     _encryptionKey,
     [=](int delayMs, int cause) { sendSignalingAsync(delayMs, cause); }),
 _enableP2P(descriptor.config.enableP2P),
+_protocolVersion(descriptor.config.protocolVersion),
 _rtcServers(std::move(descriptor.rtcServers)),
 _videoCapture(std::move(descriptor.videoCapture)),
 _stateUpdated(std::move(descriptor.stateUpdated)),
@@ -53,6 +54,8 @@ _enableHighBitrateVideo(descriptor.config.enableHighBitrateVideo) {
     assert(_stateUpdated != nullptr);
     assert(_signalingDataEmitted != nullptr);
 
+    _preferredCodecs = descriptor.config.preferredVideoCodecs;
+
     _sendSignalingMessage = [=](const Message &message) {
         if (const auto prepared = _signaling.prepareForSending(message)) {
             _signalingDataEmitted(prepared->bytes);
@@ -112,12 +115,19 @@ void Manager::start() {
     if (!strong) {
         return;
     }
-    const auto mappedState = state.isReadyToSendData
+    State mappedState;
+    if (state.isFailed) {
+        mappedState = State::Failed;
+    } else {
+        mappedState = state.isReadyToSendData
             ? State::Established
             : State::Reconnecting;
+    }
+    bool isFirstConnection = false;
     if (state.isReadyToSendData) {
         if (!strong->_didConnectOnce) {
             strong->_didConnectOnce = true;
+            isFirstConnection = true;
         }
     }
     strong->_state = mappedState;
@@ -126,6 +136,10 @@ void Manager::start() {
     strong->_mediaManager->perform(RTC_FROM_HERE, [=](MediaManager *mediaManager) {
         mediaManager->setIsConnected(state.isReadyToSendData);
     });
+
+    if (isFirstConnection) {
+        strong->sendInitialSignalingMessages();
+    }
 });
 },
 [=](DecryptedMessage &&message) {
@@ -152,7 +166,7 @@ void Manager::start() {
     });
 }));
 bool isOutgoing = _encryptionKey.isOutgoing;
-_mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [weak, isOutgoing, thread, sendSignalingMessage, videoCapture = _videoCapture, localPreferredVideoAspectRatio = _localPreferredVideoAspectRatio, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated]() {
+_mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [weak, isOutgoing, thread, sendSignalingMessage, videoCapture = _videoCapture, localPreferredVideoAspectRatio = _localPreferredVideoAspectRatio, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, preferredCodecs = _preferredCodecs]() {
     return new MediaManager(
         getMediaThread(),
         isOutgoing,
@@ -169,8 +183,12 @@ void Manager::start() {
     },
     signalBarsUpdated,
     localPreferredVideoAspectRatio,
-    enableHighBitrateVideo);
+    enableHighBitrateVideo,
+    preferredCodecs);
 }));
+_networkManager->perform(RTC_FROM_HERE, [](NetworkManager *networkManager) {
+    networkManager->start();
+});
 _mediaManager->perform(RTC_FROM_HERE, [](MediaManager *mediaManager) {
     mediaManager->start();
 });
@@ -208,6 +226,10 @@ void Manager::receiveMessage(DecryptedMessage &&message) {
     if (_remoteBatteryLevelIsLowUpdated) {
         _remoteBatteryLevelIsLowUpdated(remoteBatteryLevelIsLow->batteryLow);
     }
+} else if (const auto remoteNetworkType = absl::get_if<RemoteNetworkTypeMessage>(data)) {
+    bool wasCurrentNetworkLowCost = calculateIsCurrentNetworkLowCost();
+    _remoteNetworkIsLowCost = remoteNetworkType->isLowCost;
+    updateIsCurrentNetworkLowCost(wasCurrentNetworkLowCost);
 } else {
     if (const auto videoParameters = absl::get_if<VideoParametersMessage>(data)) {
         float value = ((float)videoParameters->aspectRatio) / 1000.0;
@@ -249,4 +271,54 @@ void Manager::setIsLowBatteryLevel(bool isLowBatteryLevel) {
     _sendTransportMessage({ RemoteBatteryLevelIsLowMessage{ isLowBatteryLevel } });
 }
 
+void Manager::setIsLocalNetworkLowCost(bool isLocalNetworkLowCost) {
+    if (isLocalNetworkLowCost != _localNetworkIsLowCost) {
+        _networkManager->perform(RTC_FROM_HERE, [isLocalNetworkLowCost](NetworkManager *networkManager) {
+            networkManager->setIsLocalNetworkLowCost(isLocalNetworkLowCost);
+        });
+
+        bool wasCurrentNetworkLowCost = calculateIsCurrentNetworkLowCost();
+        _localNetworkIsLowCost = isLocalNetworkLowCost;
+        updateIsCurrentNetworkLowCost(wasCurrentNetworkLowCost);
+
+        switch (_protocolVersion) {
+            case ProtocolVersion::V1:
+                if (_didConnectOnce) {
+                    _sendTransportMessage({ RemoteNetworkTypeMessage{ isLocalNetworkLowCost } });
+                }
+                break;
+            default:
+                break;
+        }
+    }
+}
+
+void Manager::getNetworkStats(std::function<void (TrafficStats)> completion) {
+    _networkManager->perform(RTC_FROM_HERE, [completion = std::move(completion)](NetworkManager *networkManager) {
+        completion(networkManager->getNetworkStats());
+    });
+}
+
+bool Manager::calculateIsCurrentNetworkLowCost() const {
+    return _localNetworkIsLowCost && _remoteNetworkIsLowCost;
+}
+
+void Manager::updateIsCurrentNetworkLowCost(bool wasLowCost) {
+    bool isLowCost = calculateIsCurrentNetworkLowCost();
+    if (isLowCost != wasLowCost) {
+        _mediaManager->perform(RTC_FROM_HERE, [isLowCost](MediaManager *mediaManager) {
+            mediaManager->setIsCurrentNetworkLowCost(isLowCost);
+        });
+    }
+}
+
+void Manager::sendInitialSignalingMessages() {
+    switch (_protocolVersion) {
+        case ProtocolVersion::V1:
+            _sendTransportMessage({ RemoteNetworkTypeMessage{ _localNetworkIsLowCost } });
+            break;
+        default:
+            break;
+    }
+}
+
 } // namespace tgcalls
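
Taken together, the additions above implement a symmetric negotiation: each side reduces its platform NetworkType to a single low-cost bit (WiFi/Ethernet), shares it via RemoteNetworkTypeMessage when the protocol is V1, and only the conjunction of both bits unlocks the higher video bitrate cap. A condensed model of that logic, with the bitrate values taken from this commit:

    #include <cstdio>

    enum class NetworkType { Unknown, Gprs, Edge, ThirdGeneration, Hspa, Lte, WiFi, Ethernet };

    // Local reduction of the platform network type to the negotiated bit.
    bool isLowCost(NetworkType type) {
        switch (type) {
        case NetworkType::WiFi:
        case NetworkType::Ethernet:
            return true;
        default:
            return false;
        }
    }

    // The effective flag is the conjunction of both sides' reports.
    int maxVideoBitrateBps(bool localLowCost, bool remoteLowCost, bool enableHighBitrateVideo) {
        bool lowCost = localLowCost && remoteLowCost;
        return (enableHighBitrateVideo && lowCost) ? 2000000 : 800000;
    }

    int main() {
        std::printf("%d\n", maxVideoBitrateBps(isLowCost(NetworkType::WiFi),
                                               isLowCost(NetworkType::Lte), true));      // 800000
        std::printf("%d\n", maxVideoBitrateBps(isLowCost(NetworkType::WiFi),
                                               isLowCost(NetworkType::Ethernet), true)); // 2000000
    }
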
@@ -22,15 +22,21 @@ public:
     void setMuteOutgoingAudio(bool mute);
     void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
     void setIsLowBatteryLevel(bool isLowBatteryLevel);
+    void setIsLocalNetworkLowCost(bool isLocalNetworkLowCost);
+    void getNetworkStats(std::function<void(TrafficStats)> completion);
 
 private:
     void sendSignalingAsync(int delayMs, int cause);
     void receiveMessage(DecryptedMessage &&message);
+    bool calculateIsCurrentNetworkLowCost() const;
+    void updateIsCurrentNetworkLowCost(bool wasLowCost);
+    void sendInitialSignalingMessages();
 
     rtc::Thread *_thread;
     EncryptionKey _encryptionKey;
     EncryptedConnection _signaling;
     bool _enableP2P = false;
+    ProtocolVersion _protocolVersion = ProtocolVersion::V0;
     std::vector<RtcServer> _rtcServers;
     std::shared_ptr<VideoCaptureInterface> _videoCapture;
     std::function<void(State)> _stateUpdated;
@@ -47,6 +53,9 @@ private:
     bool _didConnectOnce = false;
     float _localPreferredVideoAspectRatio = 0.0f;
     bool _enableHighBitrateVideo = false;
+    std::vector<std::string> _preferredCodecs;
+    bool _localNetworkIsLowCost = false;
+    bool _remoteNetworkIsLowCost = false;
 
 };
 
@@ -58,7 +58,8 @@ MediaManager::MediaManager(
     std::function<void(Message &&)> sendTransportMessage,
     std::function<void(int)> signalBarsUpdated,
     float localPreferredVideoAspectRatio,
-    bool enableHighBitrateVideo) :
+    bool enableHighBitrateVideo,
+    std::vector<std::string> preferredCodecs) :
 _thread(thread),
 _eventLog(std::make_unique<webrtc::RtcEventLogNull>()),
 _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()),
@@ -103,7 +104,8 @@ _enableHighBitrateVideo(enableHighBitrateVideo) {
 
     _myVideoFormats = ComposeSupportedFormats(
         mediaDeps.video_encoder_factory->GetSupportedFormats(),
-        mediaDeps.video_decoder_factory->GetSupportedFormats());
+        mediaDeps.video_decoder_factory->GetSupportedFormats(),
+        preferredCodecs);
 
     mediaDeps.audio_processing = webrtc::AudioProcessingBuilder().Create();
     _mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps));
@@ -169,7 +171,7 @@ _enableHighBitrateVideo(enableHighBitrateVideo) {
 
     _videoChannel->SetInterface(_videoNetworkInterface.get());
 
-    adjustBitratePreferences();
+    adjustBitratePreferences(true);
 }
 
 void MediaManager::start() {
@@ -379,7 +381,7 @@ void MediaManager::configureSendingVideoIfNeeded() {
 
     codec.SetParam(cricket::kCodecParamMinBitrate, 64);
     codec.SetParam(cricket::kCodecParamStartBitrate, 400);
-    codec.SetParam(cricket::kCodecParamMaxBitrate, _enableHighBitrateVideo ? 1600 : 800);
+    codec.SetParam(cricket::kCodecParamMaxBitrate, _enableHighBitrateVideo ? 2000 : 800);
 
     cricket::VideoSendParameters videoSendParameters;
     videoSendParameters.codecs.push_back(codec);
@@ -408,7 +410,7 @@ void MediaManager::configureSendingVideoIfNeeded() {
         _videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing));
     }
 
-    adjustBitratePreferences();
+    adjustBitratePreferences(true);
 }
 
 void MediaManager::checkIsSendingVideoChanged(bool wasSending) {
@@ -432,46 +434,42 @@ void MediaManager::checkIsSendingVideoChanged(bool wasSending) {
         _videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr);
     }
 
-    adjustBitratePreferences();
+    adjustBitratePreferences(true);
 }
 
-void MediaManager::adjustBitratePreferences() {
+int MediaManager::getMaxVideoBitrate() const {
+    return (_enableHighBitrateVideo && _isLowCostNetwork) ? 2000000 : 800000;
+}
+
+void MediaManager::adjustBitratePreferences(bool resetStartBitrate) {
     if (computeIsSendingVideo()) {
         webrtc::BitrateConstraints preferences;
         preferences.min_bitrate_bps = 64000;
+        if (resetStartBitrate) {
             preferences.start_bitrate_bps = 400000;
-        preferences.max_bitrate_bps = _enableHighBitrateVideo ? 1600000 : 800000;
+        }
+        preferences.max_bitrate_bps = getMaxVideoBitrate();
 
         _call->GetTransportControllerSend()->SetSdpBitrateParameters(preferences);
-
-        webrtc::BitrateSettings settings;
-        settings.min_bitrate_bps = 64000;
-        settings.start_bitrate_bps = 400000;
-        settings.max_bitrate_bps = _enableHighBitrateVideo ? 1600000 : 800000;
-
-        _call->GetTransportControllerSend()->SetClientBitratePreferences(settings);
     } else {
         webrtc::BitrateConstraints preferences;
         if (_didConfigureVideo) {
             // After we have configured outgoing video, RTCP stops working for outgoing audio
             // TODO: investigate
             preferences.min_bitrate_bps = 16000;
+            if (resetStartBitrate) {
                 preferences.start_bitrate_bps = 16000;
+            }
             preferences.max_bitrate_bps = 32000;
         } else {
             preferences.min_bitrate_bps = 8000;
+            if (resetStartBitrate) {
                 preferences.start_bitrate_bps = 16000;
+            }
             preferences.max_bitrate_bps = 32000;
         }
 
         _call->GetTransportControllerSend()->SetSdpBitrateParameters(preferences);
-
-        webrtc::BitrateSettings settings;
-        settings.min_bitrate_bps = preferences.min_bitrate_bps;
-        settings.start_bitrate_bps = preferences.start_bitrate_bps;
-        settings.max_bitrate_bps = preferences.max_bitrate_bps;
-
-        _call->GetTransportControllerSend()->SetClientBitratePreferences(settings);
     }
 }
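
The resetStartBitrate split above means a mid-call network-cost change (adjustBitratePreferences(false)) can raise or lower the bitrate cap without re-seeding the start bitrate, which would otherwise discard the congestion controller's current estimate; full (re)configuration paths pass true. A condensed model of that split; the -1 "leave unchanged" sentinel mirrors the webrtc::BitrateConstraints default and is stated here as an assumption:

    #include <cstdio>

    struct BitrateConstraints {
        int min_bitrate_bps = 0;
        int start_bitrate_bps = -1; // -1: leave the controller's estimate alone
        int max_bitrate_bps = 0;
    };

    // Mid-call cost changes keep the congestion controller's current estimate;
    // only full reconfiguration re-seeds the start bitrate.
    BitrateConstraints videoPreferences(bool resetStartBitrate, int maxBitrateBps) {
        BitrateConstraints preferences;
        preferences.min_bitrate_bps = 64000;
        if (resetStartBitrate) {
            preferences.start_bitrate_bps = 400000;
        }
        preferences.max_bitrate_bps = maxBitrateBps;
        return preferences;
    }

    int main() {
        BitrateConstraints midCall = videoPreferences(false, 2000000);
        std::printf("%d\n", midCall.start_bitrate_bps); // -1: estimate preserved
    }
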
@@ -586,6 +584,14 @@ void MediaManager::remoteVideoStateUpdated(VideoState videoState) {
     }
 }
 
+void MediaManager::setIsCurrentNetworkLowCost(bool isCurrentNetworkLowCost) {
+    if (_isLowCostNetwork != isCurrentNetworkLowCost) {
+        _isLowCostNetwork = isCurrentNetworkLowCost;
+        RTC_LOG(LS_INFO) << "MediaManager isLowCostNetwork updated: " << isCurrentNetworkLowCost ? 1 : 0;
+        adjustBitratePreferences(false);
+    }
+}
+
 MediaManager::NetworkInterfaceImpl::NetworkInterfaceImpl(MediaManager *mediaManager, bool isVideo) :
     _mediaManager(mediaManager),
     _isVideo(isVideo) {
@@ -42,7 +42,8 @@ public:
     std::function<void(Message &&)> sendTransportMessage,
     std::function<void(int)> signalBarsUpdated,
     float localPreferredVideoAspectRatio,
-    bool enableHighBitrateVideo);
+    bool enableHighBitrateVideo,
+    std::vector<std::string> preferredCodecs);
 ~MediaManager();
 
 void start();
@@ -53,6 +54,7 @@ public:
     void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
     void receiveMessage(DecryptedMessage &&message);
     void remoteVideoStateUpdated(VideoState videoState);
+    void setIsCurrentNetworkLowCost(bool isCurrentNetworkLowCost);
 
 private:
     struct SSRC {
@@ -86,7 +88,8 @@ private:
     void checkIsSendingVideoChanged(bool wasSending);
     bool videoCodecsNegotiated() const;
 
-    void adjustBitratePreferences();
+    int getMaxVideoBitrate() const;
+    void adjustBitratePreferences(bool resetStartBitrate);
     bool computeIsReceivingVideo() const;
     void checkIsReceivingVideoChanged(bool wasReceiving);
 
@@ -133,6 +136,7 @@ private:
     float _localPreferredVideoAspectRatio = 0.0f;
     float _preferredAspectRatio = 0.0f;
     bool _enableHighBitrateVideo = false;
+    bool _isLowCostNetwork = false;
 
     std::unique_ptr<MediaManager::NetworkInterfaceImpl> _audioNetworkInterface;
     std::unique_ptr<MediaManager::NetworkInterfaceImpl> _videoNetworkInterface;
@@ -264,6 +264,20 @@ bool Deserialize(RemoteBatteryLevelIsLowMessage &to, rtc::ByteBufferReader &read
     return true;
 }
 
+void Serialize(rtc::ByteBufferWriter &to, const RemoteNetworkTypeMessage &from, bool singleMessagePacket) {
+    to.WriteUInt8(from.isLowCost ? 1 : 0);
+}
+
+bool Deserialize(RemoteNetworkTypeMessage &to, rtc::ByteBufferReader &reader, bool singleMessagePacket) {
+    uint8_t value = 0;
+    if (!reader.ReadUInt8(&value)) {
+        RTC_LOG(LS_ERROR) << "Could not read isLowCost.";
+        return false;
+    }
+    to.isLowCost = (value != 0);
+    return true;
+}
+
 enum class TryResult : uint8_t {
     Success,
     TryNext,
@@ -97,6 +97,13 @@ struct RemoteBatteryLevelIsLowMessage {
     bool batteryLow = false;
 };
 
+struct RemoteNetworkTypeMessage {
+    static constexpr uint8_t kId = 10;
+    static constexpr bool kRequiresAck = true;
+
+    bool isLowCost = false;
+};
+
 // To add a new message you should:
 // 1. Add the message struct.
 // 2. Add the message to the variant in Message struct.
@@ -112,7 +119,8 @@ struct Message {
     VideoDataMessage,
     UnstructuredDataMessage,
     VideoParametersMessage,
-    RemoteBatteryLevelIsLowMessage> data;
+    RemoteBatteryLevelIsLowMessage,
+    RemoteNetworkTypeMessage> data;
 };
 
 rtc::CopyOnWriteBuffer SerializeMessageWithSeq(
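
Per the checklist quoted above, a new wire message touches three places: the struct (with its kId and kRequiresAck constants), the Serialize/Deserialize overloads, and the variant inside Message. A self-contained model of the same tagged-union framing — one id byte, then the payload — using std::variant in place of absl::variant and hypothetical simplified types:

    #include <cstdint>
    #include <cstdio>
    #include <variant>
    #include <vector>

    struct EmptyMessage {
        static constexpr uint8_t kId = 1;
    };

    struct RemoteNetworkTypeMessage {
        static constexpr uint8_t kId = 10;
        bool isLowCost = false;
    };

    using Message = std::variant<EmptyMessage, RemoteNetworkTypeMessage>;

    std::vector<uint8_t> serialize(const Message &message) {
        std::vector<uint8_t> out;
        if (const auto m = std::get_if<RemoteNetworkTypeMessage>(&message)) {
            out.push_back(RemoteNetworkTypeMessage::kId);
            out.push_back(m->isLowCost ? 1 : 0); // the whole payload is one byte
        } else {
            out.push_back(EmptyMessage::kId);
        }
        return out;
    }

    bool deserialize(const std::vector<uint8_t> &bytes, Message &out) {
        if (bytes.empty()) return false;
        if (bytes[0] == RemoteNetworkTypeMessage::kId) {
            if (bytes.size() < 2) return false;
            out = RemoteNetworkTypeMessage{bytes[1] != 0};
            return true;
        }
        if (bytes[0] == EmptyMessage::kId) {
            out = EmptyMessage{};
            return true;
        }
        return false; // unknown id
    }

    int main() {
        Message in = RemoteNetworkTypeMessage{true};
        Message roundTripped;
        bool ok = deserialize(serialize(in), roundTripped);
        std::printf("%d %d\n", ok,
                    std::get<RemoteNetworkTypeMessage>(roundTripped).isLowCost); // 1 1
    }
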
@@ -33,6 +33,8 @@ NetworkManager::NetworkManager(
     std::function<void(Message &&)> sendSignalingMessage,
     std::function<void(int delayMs, int cause)> sendTransportServiceAsync) :
 _thread(thread),
+_enableP2P(enableP2P),
+_rtcServers(rtcServers),
 _transport(
     EncryptedConnection::Type::Transport,
     encryptionKey,
@@ -43,14 +45,28 @@ _transportMessageReceived(std::move(transportMessageReceived)),
 _sendSignalingMessage(std::move(sendSignalingMessage)),
 _localIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH)) {
     assert(_thread->IsCurrent());
+}
+
+NetworkManager::~NetworkManager() {
+    assert(_thread->IsCurrent());
+
+    RTC_LOG(LS_INFO) << "NetworkManager::~NetworkManager()";
+
+    _transportChannel.reset();
+    _asyncResolverFactory.reset();
+    _portAllocator.reset();
+    _networkManager.reset();
+    _socketFactory.reset();
+}
+
+void NetworkManager::start() {
     _socketFactory.reset(new rtc::BasicPacketSocketFactory(_thread));
 
     _networkManager = std::make_unique<rtc::BasicNetworkManager>();
     _portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), nullptr, nullptr));
 
     uint32_t flags = cricket::PORTALLOCATOR_DISABLE_TCP;
-    if (!enableP2P) {
+    if (!_enableP2P) {
         flags |= cricket::PORTALLOCATOR_DISABLE_UDP;
         flags |= cricket::PORTALLOCATOR_DISABLE_STUN;
     }
@@ -60,7 +76,7 @@ _localIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::Cre
     cricket::ServerAddresses stunServers;
     std::vector<cricket::RelayServerConfig> turnServers;
 
-    for (auto &server : rtcServers) {
+    for (auto &server : _rtcServers) {
         if (server.isTurn) {
             turnServers.push_back(cricket::RelayServerConfig(
                 rtc::SocketAddress(server.host, server.port),
@@ -103,18 +119,10 @@ _localIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::Cre
     _transportChannel->MaybeStartGathering();
 
     _transportChannel->SetRemoteIceMode(cricket::ICEMODE_FULL);
-}
-
-NetworkManager::~NetworkManager() {
-    assert(_thread->IsCurrent());
-
-    RTC_LOG(LS_INFO) << "NetworkManager::~NetworkManager()";
-
-    _transportChannel.reset();
-    _asyncResolverFactory.reset();
-    _portAllocator.reset();
-    _networkManager.reset();
-    _socketFactory.reset();
+
+    _lastNetworkActivityMs = rtc::TimeMillis();
+
+    checkConnectionTimeout();
 }
 
 void NetworkManager::receiveSignalingMessage(DecryptedMessage &&message) {
@@ -143,6 +151,7 @@ uint32_t NetworkManager::sendMessage(const Message &message) {
     if (const auto prepared = _transport.prepareForSending(message)) {
         rtc::PacketOptions packetOptions;
         _transportChannel->SendPacket((const char *)prepared->bytes.data(), prepared->bytes.size(), packetOptions, 0);
+        addTrafficStats(prepared->bytes.size(), false);
         return prepared->counter;
     }
     return 0;
@@ -152,9 +161,45 @@ void NetworkManager::sendTransportService(int cause) {
     if (const auto prepared = _transport.prepareForSendingService(cause)) {
         rtc::PacketOptions packetOptions;
         _transportChannel->SendPacket((const char *)prepared->bytes.data(), prepared->bytes.size(), packetOptions, 0);
+        addTrafficStats(prepared->bytes.size(), false);
     }
 }
 
+void NetworkManager::setIsLocalNetworkLowCost(bool isLocalNetworkLowCost) {
+    _isLocalNetworkLowCost = isLocalNetworkLowCost;
+}
+
+TrafficStats NetworkManager::getNetworkStats() {
+    TrafficStats stats;
+    stats.bytesSentWifi = _trafficStatsWifi.outgoing;
+    stats.bytesReceivedWifi = _trafficStatsWifi.incoming;
+    stats.bytesSentMobile = _trafficStatsCellular.outgoing;
+    stats.bytesReceivedMobile = _trafficStatsCellular.incoming;
+    return stats;
+}
+
+void NetworkManager::checkConnectionTimeout() {
+    const auto weak = std::weak_ptr<NetworkManager>(shared_from_this());
+    _thread->PostDelayedTask(RTC_FROM_HERE, [weak]() {
+        auto strong = weak.lock();
+        if (!strong) {
+            return;
+        }
+
+        int64_t currentTimestamp = rtc::TimeMillis();
+        const int64_t maxTimeout = 20000;
+
+        if (strong->_lastNetworkActivityMs + maxTimeout < currentTimestamp) {
+            NetworkManager::State emitState;
+            emitState.isReadyToSendData = false;
+            emitState.isFailed = true;
+            strong->_stateUpdated(emitState);
+        }
+
+        strong->checkConnectionTimeout();
+    }, 1000);
+}
+
 void NetworkManager::candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) {
     assert(_thread->IsCurrent());
     _sendSignalingMessage({ CandidatesListMessage{ { 1, candidate }, _localIceParameters } });
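
checkConnectionTimeout above is a self-rescheduling watchdog: it posts itself every second, captures only a weak_ptr (which is why NetworkManager now derives from enable_shared_from_this), and flags the call as failed after 20 s without inbound packets. A reduced sketch of the lifetime pattern, with a plain thread standing in for rtc::Thread::PostDelayedTask and a hypothetical class name:

    #include <chrono>
    #include <cstdio>
    #include <memory>
    #include <thread>

    // Self-rescheduling watchdog that dies with its owner: the task holds only
    // a weak_ptr, so destroying the Watchdog ends the loop on the next tick.
    class Watchdog : public std::enable_shared_from_this<Watchdog> {
    public:
        void arm() {
            std::weak_ptr<Watchdog> weak = weak_from_this();
            std::thread([weak]() {
                while (true) {
                    std::this_thread::sleep_for(std::chrono::milliseconds(100));
                    auto strong = weak.lock();
                    if (!strong) {
                        return; // owner gone: stop silently
                    }
                    strong->tick();
                }
            }).detach();
        }

    private:
        void tick() { std::printf("still alive\n"); }
    };

    int main() {
        auto watchdog = std::make_shared<Watchdog>();
        watchdog->arm();
        std::this_thread::sleep_for(std::chrono::milliseconds(350));
        watchdog.reset(); // loop exits on the next tick
        std::this_thread::sleep_for(std::chrono::milliseconds(200));
    }
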
@@ -189,6 +234,10 @@ void NetworkManager::transportReadyToSend(cricket::IceTransportInternal *transpo
 void NetworkManager::transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, int unused) {
     assert(_thread->IsCurrent());
 
+    _lastNetworkActivityMs = rtc::TimeMillis();
+
+    addTrafficStats(size, true);
+
     if (auto decrypted = _transport.handleIncomingPacket(bytes, size)) {
         if (_transportMessageReceived) {
             _transportMessageReceived(std::move(decrypted->main));
@ -212,4 +261,20 @@ void NetworkManager::transportRouteChanged(absl::optional<rtc::NetworkRoute> rou
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void NetworkManager::addTrafficStats(int64_t byteCount, bool isIncoming) {
|
||||||
|
if (_isLocalNetworkLowCost) {
|
||||||
|
if (isIncoming) {
|
||||||
|
_trafficStatsWifi.incoming += byteCount;
|
||||||
|
} else {
|
||||||
|
_trafficStatsWifi.outgoing += byteCount;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (isIncoming) {
|
||||||
|
_trafficStatsCellular.incoming += byteCount;
|
||||||
|
} else {
|
||||||
|
_trafficStatsCellular.outgoing += byteCount;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
} // namespace tgcalls
|
} // namespace tgcalls
|
||||||
|
|
|
@@ -34,10 +34,16 @@ namespace tgcalls {

 struct Message;

-class NetworkManager : public sigslot::has_slots<> {
+class NetworkManager : public sigslot::has_slots<>, public std::enable_shared_from_this<NetworkManager> {
 public:
 	struct State {
 		bool isReadyToSendData = false;
+		bool isFailed = false;
+	};
+
+	struct InterfaceTrafficStats {
+		int64_t incoming = 0;
+		int64_t outgoing = 0;
 	};

 	NetworkManager(

@@ -51,19 +57,26 @@ public:
 		std::function<void(int delayMs, int cause)> sendTransportServiceAsync);
 	~NetworkManager();

+	void start();
 	void receiveSignalingMessage(DecryptedMessage &&message);
 	uint32_t sendMessage(const Message &message);
 	void sendTransportService(int cause);
+	void setIsLocalNetworkLowCost(bool isLocalNetworkLowCost);
+	TrafficStats getNetworkStats();

 private:
+	void checkConnectionTimeout();
 	void candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate);
 	void candidateGatheringState(cricket::IceTransportInternal *transport);
 	void transportStateChanged(cricket::IceTransportInternal *transport);
 	void transportReadyToSend(cricket::IceTransportInternal *transport);
 	void transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, int unused);
 	void transportRouteChanged(absl::optional<rtc::NetworkRoute> route);
+	void addTrafficStats(int64_t byteCount, bool isIncoming);

 	rtc::Thread *_thread = nullptr;
+	bool _enableP2P = false;
+	std::vector<RtcServer> _rtcServers;
 	EncryptedConnection _transport;
 	bool _isOutgoing = false;
 	std::function<void(const NetworkManager::State &)> _stateUpdated;

@@ -78,6 +91,11 @@ private:

 	PeerIceParameters _localIceParameters;
 	absl::optional<PeerIceParameters> _remoteIceParameters;
+
+	bool _isLocalNetworkLowCost = false;
+	int64_t _lastNetworkActivityMs = 0;
+	InterfaceTrafficStats _trafficStatsWifi;
+	InterfaceTrafficStats _trafficStatsCellular;
 };

 } // namespace tgcalls
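For context, a self-contained mock of the accounting rule these hunks introduce (assumption: this mirrors addTrafficStats(), it is not tgcalls code). Bytes land in the Wi-Fi buckets whenever the local network is flagged low-cost, otherwise in the cellular buckets:

#include <cstdint>
#include <iostream>

struct InterfaceTrafficStats { int64_t incoming = 0, outgoing = 0; };

struct Accounting {
    bool isLocalNetworkLowCost = false;
    InterfaceTrafficStats wifi, cellular;

    void add(int64_t byteCount, bool isIncoming) {
        // Same routing as addTrafficStats(): low-cost network => Wi-Fi buckets.
        InterfaceTrafficStats &bucket = isLocalNetworkLowCost ? wifi : cellular;
        (isIncoming ? bucket.incoming : bucket.outgoing) += byteCount;
    }
};

int main() {
    Accounting a;
    a.isLocalNetworkLowCost = true;   // e.g. on Wi-Fi
    a.add(1200, /*isIncoming=*/true);
    a.isLocalNetworkLowCost = false;  // e.g. switched to cellular mid-call
    a.add(800, /*isIncoming=*/false);
    std::cout << a.wifi.incoming << " " << a.cellular.outgoing << "\n"; // 1200 800
}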
@@ -11,15 +11,17 @@ VideoCaptureInterfaceObject::VideoCaptureInterfaceObject(std::shared_ptr<PlatformContext> platformContext) {
 	_videoSource = PlatformInterface::SharedInstance()->makeVideoSource(Manager::getMediaThread(), MediaManager::getWorkerThread());
 	_platformContext = platformContext;
 	//this should outlive the capturer
+	if (_videoSource) {
 	_videoCapturer = PlatformInterface::SharedInstance()->makeVideoCapturer(_videoSource, _useFrontCamera, [this](VideoState state) {
 		if (this->_stateUpdated) {
 			this->_stateUpdated(state);
 		}
-	}, platformContext);
+	}, platformContext, _videoCapturerResolution);
+	}
 }

 VideoCaptureInterfaceObject::~VideoCaptureInterfaceObject() {
-	if (_currentUncroppedSink != nullptr) {
+	if (_videoCapturer && _currentUncroppedSink != nullptr) {
 		//_videoSource->RemoveSink(_currentSink.get());
 		_videoCapturer->setUncroppedOutput(nullptr);
 	}

@@ -30,30 +32,52 @@ void VideoCaptureInterfaceObject::switchCamera() {
 	if (_videoCapturer && _currentUncroppedSink) {
 		_videoCapturer->setUncroppedOutput(nullptr);
 	}
+	if (_videoSource) {
 	_videoCapturer = PlatformInterface::SharedInstance()->makeVideoCapturer(_videoSource, _useFrontCamera, [this](VideoState state) {
 		if (this->_stateUpdated) {
 			this->_stateUpdated(state);
 		}
-	}, _platformContext);
+	}, _platformContext, _videoCapturerResolution);
+	}
+	if (_videoCapturer) {
 	if (_currentUncroppedSink) {
 		_videoCapturer->setUncroppedOutput(_currentUncroppedSink);
 	}
 	_videoCapturer->setState(_state);
+	}
 }

 void VideoCaptureInterfaceObject::setState(VideoState state) {
 	if (_state != state) {
 		_state = state;
+		if (_videoCapturer) {
 		_videoCapturer->setState(state);
+		}
 	}
 }

 void VideoCaptureInterfaceObject::setPreferredAspectRatio(float aspectRatio) {
-	_videoCapturer->setPreferredCaptureAspectRatio(aspectRatio);
+	if (_videoCapturer) {
+		if (aspectRatio > 0.01 && _videoCapturerResolution.first != 0 && _videoCapturerResolution.second != 0) {
+			float originalWidth = (float)_videoCapturerResolution.first;
+			float originalHeight = (float)_videoCapturerResolution.second;
+
+			float width = (originalWidth > aspectRatio * originalHeight)
+				? int(std::round(aspectRatio * originalHeight))
+				: originalWidth;
+			float height = (originalWidth > aspectRatio * originalHeight)
+				? originalHeight
+				: int(std::round(originalHeight / aspectRatio));
+
+			PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, (int)width, (int)height, 30);
+		}
+	}
 }

 void VideoCaptureInterfaceObject::setOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
+	if (_videoCapturer) {
 	_videoCapturer->setUncroppedOutput(sink);
+	}
 	_currentUncroppedSink = sink;
 }
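The setPreferredAspectRatio() math above trims exactly one axis before handing the result to adaptVideoSource(). A standalone sketch that mirrors the commit's branch structure (hypothetical helper; resolution is width-first, as in _videoCapturerResolution):

#include <cmath>
#include <iostream>
#include <utility>

std::pair<int, int> adaptedSize(std::pair<int, int> resolution, float aspectRatio) {
    float w = (float)resolution.first;
    float h = (float)resolution.second;
    if (w > aspectRatio * h) {
        w = std::round(aspectRatio * h);  // frame too wide for the target: cut width
    } else {
        h = std::round(h / aspectRatio);  // otherwise the commit recomputes height
    }
    return {(int)w, (int)h};
}

int main() {
    auto s = adaptedSize({1280, 720}, 0.7f); // portrait-leaning crop of a 16:9 source
    std::cout << s.first << "x" << s.second << "\n"; // 504x720, then fed to adaptVideoSource at 30 fps
}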
@@ -28,6 +28,7 @@
 private:
 	std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentUncroppedSink;
 	std::shared_ptr<PlatformContext> _platformContext;
+	std::pair<int, int> _videoCapturerResolution;
 	std::unique_ptr<VideoCapturerInterface> _videoCapturer;
 	std::function<void(VideoState)> _stateUpdated;
 	bool _useFrontCamera = true;
@@ -155,7 +155,7 @@ onSignalBarsUpdated_(std::move(descriptor.signalBarsUpdated)) {

 InstanceImplLegacy::~InstanceImplLegacy() {
 	if (controller_) {
-		stop();
+		stop([](FinalState state){});
 	}
 }

@@ -273,7 +273,7 @@ PersistentState InstanceImplLegacy::getPersistentState() {
 	return {controller_->GetPersistentState()};
 }

-FinalState InstanceImplLegacy::stop() {
+void InstanceImplLegacy::stop(std::function<void(FinalState)> completion) {
 	controller_->Stop();

 	auto result = FinalState();

@@ -285,7 +285,7 @@ FinalState InstanceImplLegacy::stop() {
 	delete controller_;
 	controller_ = nullptr;

-	return result;
+	completion(result);
 }

 void InstanceImplLegacy::ControllerStateCallback(tgvoip::VoIPController *controller, int state) {

@@ -323,8 +323,10 @@ int InstanceImplLegacy::GetConnectionMaxLayer() {
 	return tgvoip::VoIPController::GetConnectionMaxLayer();
 }

-std::string InstanceImplLegacy::GetVersion() {
-	return tgvoip::VoIPController::GetVersion();
+std::vector<std::string> InstanceImplLegacy::GetVersions() {
+	std::vector<std::string> result;
+	result.push_back("2.4.4");
+	return result;
 }

 template <>
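Why the signature change: a synchronous `FinalState stop()` cannot express a backend that finishes tearing down on another thread, while a completion callback covers both the synchronous and asynchronous cases. A minimal standalone sketch (illustrative types, not the Instance interface itself):

#include <functional>
#include <iostream>
#include <string>

struct FinalState { std::string debugLog; };

void stop(std::function<void(FinalState)> completion) {
    // A legacy, synchronous backend may simply invoke the completion inline,
    // exactly as InstanceImplLegacy::stop() does above; an async backend would
    // post it to another thread instead. The caller's code is identical either way.
    completion(FinalState{"call ended"});
}

int main() {
    stop([](FinalState state) { std::cout << state.debugLog << "\n"; });
}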
@@ -14,7 +14,7 @@ public:
 	~InstanceImplLegacy();

 	static int GetConnectionMaxLayer();
-	static std::string GetVersion();
+	static std::vector<std::string> GetVersions();

 	void receiveSignalingData(const std::vector<uint8_t> &data) override;
 	void setNetworkType(NetworkType networkType) override;

@@ -35,7 +35,7 @@ public:
 	int64_t getPreferredRelayId() override;
 	TrafficStats getTrafficStats() override;
 	PersistentState getPersistentState() override;
-	FinalState stop() override;
+	void stop(std::function<void(FinalState)> completion) override;

 private:
 	tgvoip::VoIPController *controller_;
@@ -27,7 +27,8 @@ public:
 	virtual std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory() = 0;
 	virtual bool supportsEncoding(const std::string &codecName) = 0;
 	virtual rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) = 0;
-	virtual std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) = 0;
+	virtual void adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) = 0;
+	virtual std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) = 0;

 };
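The new `std::pair<int, int> &outResolution` argument is a plain out-parameter: the factory reports the resolution the camera actually settled on, which setPreferredAspectRatio() consumes later. A small standalone sketch of the shape (illustrative names, not the real factory):

#include <iostream>
#include <memory>
#include <utility>

struct Capturer {};

std::unique_ptr<Capturer> makeVideoCapturer(std::pair<int, int> &outResolution) {
    outResolution = {1280, 720};  // whatever format the device negotiated
    return std::make_unique<Capturer>();
}

int main() {
    std::pair<int, int> videoCapturerResolution;
    auto capturer = makeVideoCapturer(videoCapturerResolution);
    std::cout << videoCapturerResolution.first << "x"
              << videoCapturerResolution.second << "\n"; // 1280x720
}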
@@ -20,6 +20,14 @@

 namespace tgcalls {

+void AndroidInterface::configurePlatformAudio() {
+
+}
+
+float AndroidInterface::getDisplayAspectRatio() {
+	return 0;
+}
+
 std::unique_ptr<webrtc::VideoEncoderFactory> AndroidInterface::makeVideoEncoderFactory() {
 	JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
 	webrtc::ScopedJavaLocalRef<jclass> factory_class =

@@ -46,6 +54,10 @@ std::unique_ptr<webrtc::VideoDecoderFactory> AndroidInterface::makeVideoDecoderFactory() {
 	return webrtc::JavaToNativeVideoDecoderFactory(env, factory_object.obj());
 }

+void AndroidInterface::adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) {
+
+}
+
 rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> AndroidInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) {
 	JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
 	_source = webrtc::CreateJavaVideoSource(env, signalingThread, false, false);

@@ -75,7 +87,7 @@ bool AndroidInterface::supportsEncoding(const std::string &codecName) {
 	return codecName == cricket::kVp8CodecName;
 }

-std::unique_ptr<VideoCapturerInterface> AndroidInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) {
+std::unique_ptr<VideoCapturerInterface> AndroidInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) {
 	return std::make_unique<VideoCapturerInterfaceImpl>(_source, useFrontCamera, stateUpdated, platformContext);
 }
@@ -9,11 +9,14 @@ namespace tgcalls {

 class AndroidInterface : public PlatformInterface {
 public:
+	void configurePlatformAudio() override;
+	float getDisplayAspectRatio() override;
 	std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory() override;
 	std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory() override;
 	bool supportsEncoding(const std::string &codecName) override;
 	rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) override;
-	std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) override;
+	void adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) override;
+	std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) override;

 private:
 	rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> _source;
@@ -13,7 +13,8 @@ public:
 	std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory() override;
 	bool supportsEncoding(const std::string &codecName) override;
 	rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) override;
-	std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) override;
+	virtual void adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) override;
+	std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) override;

 };
@@ -18,6 +18,12 @@

 namespace tgcalls {

+static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
+	webrtc::VideoTrackSourceProxy *proxy_source =
+		static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
+	return static_cast<webrtc::ObjCVideoTrackSource *>(proxy_source->internal());
+}
+
 void DarwinInterface::configurePlatformAudio() {
 #ifdef WEBRTC_IOS
 	[RTCAudioSession sharedInstance].useManualAudio = true;

@@ -45,7 +51,7 @@ bool DarwinInterface::supportsEncoding(const std::string &codecName) {
 		return [[AVAssetExportSession allExportPresets] containsObject:AVAssetExportPresetHEVCHighestQuality];
 	}
 #elif defined WEBRTC_MAC // WEBRTC_IOS
-	if (@available(macOS 10.13, *)) {
+	if (@available(macOS 10.14, *)) {
 		return [[AVAssetExportSession allExportPresets] containsObject:AVAssetExportPresetHEVCHighestQuality];
 	}
 #endif // WEBRTC_IOS || WEBRTC_MAC

@@ -54,11 +60,7 @@ bool DarwinInterface::supportsEncoding(const std::string &codecName) {
 	} else if (codecName == cricket::kVp8CodecName) {
 		return true;
 	} else if (codecName == cricket::kVp9CodecName) {
-#ifndef WEBRTC_IOS
 		return true;
-#else
-		return false;
-#endif
 	}
 	return false;
 }

@@ -68,8 +70,12 @@ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> DarwinInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) {
 	return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, objCVideoTrackSource);
 }

-std::unique_ptr<VideoCapturerInterface> DarwinInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) {
-	return std::make_unique<VideoCapturerInterfaceImpl>(source, useFrontCamera, stateUpdated);
+void DarwinInterface::adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) {
+	getObjCVideoSource(videoSource)->OnOutputFormatRequest(width, height, fps);
+}
+
+std::unique_ptr<VideoCapturerInterface> DarwinInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) {
+	return std::make_unique<VideoCapturerInterfaceImpl>(source, useFrontCamera, stateUpdated, outResolution);
 }

 std::unique_ptr<PlatformInterface> CreatePlatformInterface() {
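A sketch of why adaptVideoSource() sits on PlatformInterface rather than on a concrete class: the caller in VideoCaptureInterface.cpp stays platform-agnostic, while Darwin forwards to OnOutputFormatRequest and Android's override is a no-op in this commit. Names below are illustrative, not tgcalls API:

#include <cstdio>
#include <memory>

struct Platform {                  // stand-in for tgcalls::PlatformInterface
    virtual ~Platform() = default;
    virtual void adaptVideoSource(int width, int height, int fps) = 0;
};

struct DarwinLike : Platform {     // Darwin maps the request onto its ObjC source
    void adaptVideoSource(int width, int height, int fps) override {
        std::printf("request %dx%d@%d from the track source\n", width, height, fps);
    }
};

struct AndroidLike : Platform {    // Android's override does nothing yet
    void adaptVideoSource(int, int, int) override {}
};

int main() {
    std::unique_ptr<Platform> p = std::make_unique<DarwinLike>();
    p->adaptVideoSource(504, 720, 30); // the same call site works on either platform
}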
@@ -9,7 +9,11 @@
 */

 #import <Foundation/Foundation.h>
+#ifdef WEBRTC_IOS
 #import <UIKit/UIKit.h>
+#else
+#import <AppKit/AppKit.h>
+#endif

 #import "RTCMacros.h"
 #import "RTCVideoRenderer.h"

@@ -28,8 +32,13 @@ NS_ASSUME_NONNULL_BEGIN
 * bounds using OpenGLES 2.0 or OpenGLES 3.0.
 */
 RTC_OBJC_EXPORT
-NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.")
-@interface GLVideoView : UIView <RTCVideoRenderer>
+@interface GLVideoView :
+#ifdef WEBRTC_IOS
+UIView
+#else
+NSView
+#endif
+<RTCVideoRenderer>

 @property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;
TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.h (new file, 50 lines)
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+#if !TARGET_OS_IPHONE
+
+#import <AppKit/NSOpenGLView.h>
+#import "api/media_stream_interface.h"
+#import "RTCVideoRenderer.h"
+#import "RTCVideoViewShading.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class GLVideoView;
+
+@protocol GLVideoViewDelegate<RTCVideoViewDelegate> @end
+
+@interface GLVideoView : NSView <RTCVideoRenderer>
+
+@property(nonatomic, weak) id<GLVideoViewDelegate> delegate;
+
+- (instancetype)initWithFrame:(NSRect)frameRect
+                  pixelFormat:(NSOpenGLPixelFormat *)format
+                       shader:(id<RTCVideoViewShading>)shader
+    NS_DESIGNATED_INITIALIZER;
+
+@property(nonatomic, nullable) NSValue *rotationOverride;
+
+@property (nonatomic, readwrite) int internalOrientation;
+
+- (std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)getSink;
+- (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived;
+- (void)internalSetOnOrientationUpdated:(void (^ _Nullable)(int))onOrientationUpdated;
+- (void)internalSetOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated;
+- (void)setVideoContentMode:(CALayerContentsGravity)mode;
+- (void)setIsForceMirrored:(BOOL)forceMirrored;
+@end
+
+NS_ASSUME_NONNULL_END
+
+#endif
TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.mm
Normal file
491
TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.mm
Normal file
|
@ -0,0 +1,491 @@
|
||||||
|
/*
|
||||||
|
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||||
|
*
|
||||||
|
* Use of this source code is governed by a BSD-style license
|
||||||
|
* that can be found in the LICENSE file in the root of the source
|
||||||
|
* tree. An additional intellectual property rights grant can be found
|
||||||
|
* in the file PATENTS. All contributing project authors may
|
||||||
|
* be found in the AUTHORS file in the root of the source tree.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#import "GLVideoViewMac.h"
|
||||||
|
|
||||||
|
#import "TGRTCCVPixelBuffer.h"
|
||||||
|
|
||||||
|
#import <GLKit/GLKit.h>
|
||||||
|
|
||||||
|
#import "RTCDefaultShader.h"
|
||||||
|
#import "RTCDisplayLinkTimer.h"
|
||||||
|
#import "RTCI420TextureCache.h"
|
||||||
|
#import "base/RTCLogging.h"
|
||||||
|
#import "base/RTCVideoFrame.h"
|
||||||
|
#import "base/RTCVideoFrameBuffer.h"
|
||||||
|
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
|
||||||
|
#include "sdk/objc/native/api/video_frame.h"
|
||||||
|
#import "rtc_base/time_utils.h"
|
||||||
|
#include "sdk/objc/native/src/objc_frame_buffer.h"
|
||||||
|
|
||||||
|
namespace {
|
||||||
|
|
||||||
|
static RTCVideoFrame *customToObjCVideoFrame(const webrtc::VideoFrame &frame, RTCVideoRotation &rotation) {
|
||||||
|
rotation = RTCVideoRotation(frame.rotation());
|
||||||
|
RTCVideoFrame *videoFrame =
|
||||||
|
[[RTCVideoFrame alloc] initWithBuffer:webrtc::ToObjCVideoFrameBuffer(frame.video_frame_buffer())
|
||||||
|
rotation:rotation
|
||||||
|
timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
|
||||||
|
videoFrame.timeStamp = frame.timestamp();
|
||||||
|
|
||||||
|
return videoFrame;
|
||||||
|
}
|
||||||
|
|
||||||
|
class VideoRendererAdapterImpl : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
|
||||||
|
public:
|
||||||
|
VideoRendererAdapterImpl(void (^frameReceived)(CGSize, RTCVideoFrame *, RTCVideoRotation)) {
|
||||||
|
_frameReceived = [frameReceived copy];
|
||||||
|
}
|
||||||
|
|
||||||
|
void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
|
||||||
|
RTCVideoRotation rotation = RTCVideoRotation_0;
|
||||||
|
RTCVideoFrame* videoFrame = customToObjCVideoFrame(nativeVideoFrame, rotation);
|
||||||
|
|
||||||
|
CGSize currentSize = (videoFrame.rotation % 180 == 0) ? CGSizeMake(videoFrame.width, videoFrame.height) : CGSizeMake(videoFrame.height, videoFrame.width);
|
||||||
|
|
||||||
|
if (_frameReceived) {
|
||||||
|
_frameReceived(currentSize, videoFrame, rotation);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
void (^_frameReceived)(CGSize, RTCVideoFrame *, RTCVideoRotation);
|
||||||
|
};
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
static CGSize scaleToFillSize(CGSize size, CGSize maxSize) {
|
||||||
|
if (size.width < 1.0f) {
|
||||||
|
size.width = 1.0f;
|
||||||
|
}
|
||||||
|
if (size.height < 1.0f) {
|
||||||
|
size.height = 1.0f;
|
||||||
|
}
|
||||||
|
if (size.width < maxSize.width) {
|
||||||
|
size.height = floor(maxSize.width * size.height / MAX(1.0f, size.width));
|
||||||
|
size.width = maxSize.width;
|
||||||
|
}
|
||||||
|
if (size.height < maxSize.height) {
|
||||||
|
size.width = floor(maxSize.height * size.width / MAX(1.0f, size.height));
|
||||||
|
size.height = maxSize.height;
|
||||||
|
}
|
||||||
|
return size;
|
||||||
|
}
|
||||||
|
|
||||||
|
static CGSize aspectFilled(CGSize from, CGSize to) {
|
||||||
|
CGFloat scale = MAX(from.width / MAX(1.0, to.width), from.height / MAX(1.0, to.height));
|
||||||
|
return NSMakeSize(ceil(to.width * scale), ceil(to.height * scale));
|
||||||
|
}
|
||||||
|
static CGSize aspectFitted(CGSize from, CGSize to) {
|
||||||
|
CGFloat scale = MAX(from.width / MAX(1.0, to.width), from.height / MAX(1.0, to.height));
|
||||||
|
return NSMakeSize(ceil(to.width * scale), ceil(to.height * scale));
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
|
||||||
|
func aspectFilled(_ size: CGSize) -> CGSize {
|
||||||
|
let scale = max(size.width / max(1.0, self.width), size.height / max(1.0, self.height))
|
||||||
|
return CGSize(width: ceil(self.width * scale), height: ceil(self.height * scale))
|
||||||
|
}
|
||||||
|
func fittedToWidthOrSmaller(_ width: CGFloat) -> CGSize {
|
||||||
|
let scale = min(1.0, width / max(1.0, self.width))
|
||||||
|
return CGSize(width: floor(self.width * scale), height: floor(self.height * scale))
|
||||||
|
}
|
||||||
|
|
||||||
|
func aspectFitted(_ size: CGSize) -> CGSize {
|
||||||
|
let scale = min(size.width / max(1.0, self.width), size.height / max(1.0, self.height))
|
||||||
|
return CGSize(width: ceil(self.width * scale), height: ceil(self.height * scale))
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
|
||||||
|
#if !TARGET_OS_IPHONE
|
||||||
|
|
||||||
|
@interface OpenGLVideoView : NSOpenGLView
|
||||||
|
@property(atomic, strong) RTCVideoFrame *videoFrame;
|
||||||
|
@property(atomic, strong) RTCI420TextureCache *i420TextureCache;
|
||||||
|
|
||||||
|
- (void)drawFrame;
|
||||||
|
- (instancetype)initWithFrame:(NSRect)frame
|
||||||
|
pixelFormat:(NSOpenGLPixelFormat *)format
|
||||||
|
shader:(id<RTCVideoViewShading>)shader;
|
||||||
|
@end
|
||||||
|
|
||||||
|
static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
|
||||||
|
const CVTimeStamp *now,
|
||||||
|
const CVTimeStamp *outputTime,
|
||||||
|
CVOptionFlags flagsIn,
|
||||||
|
CVOptionFlags *flagsOut,
|
||||||
|
void *displayLinkContext) {
|
||||||
|
OpenGLVideoView *view =
|
||||||
|
(__bridge OpenGLVideoView *)displayLinkContext;
|
||||||
|
[view drawFrame];
|
||||||
|
return kCVReturnSuccess;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@implementation OpenGLVideoView {
|
||||||
|
CVDisplayLinkRef _displayLink;
|
||||||
|
RTCVideoFrame * _lastDrawnFrame;
|
||||||
|
id<RTCVideoViewShading> _shader;
|
||||||
|
|
||||||
|
int64_t _lastDrawnFrameTimeStampNs;
|
||||||
|
void (^_onFirstFrameReceived)(float);
|
||||||
|
bool _firstFrameReceivedReported;
|
||||||
|
}
|
||||||
|
|
||||||
|
@synthesize videoFrame = _videoFrame;
|
||||||
|
@synthesize i420TextureCache = _i420TextureCache;
|
||||||
|
|
||||||
|
- (instancetype)initWithFrame:(NSRect)frame
|
||||||
|
pixelFormat:(NSOpenGLPixelFormat *)format
|
||||||
|
shader:(id<RTCVideoViewShading>)shader {
|
||||||
|
if (self = [super initWithFrame:frame pixelFormat:format]) {
|
||||||
|
self->_shader = shader;
|
||||||
|
}
|
||||||
|
return self;
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)reshape {
|
||||||
|
[super reshape];
|
||||||
|
NSRect frame = [self frame];
|
||||||
|
[self ensureGLContext];
|
||||||
|
CGLLockContext([[self openGLContext] CGLContextObj]);
|
||||||
|
glViewport(0, 0, frame.size.width, frame.size.height);
|
||||||
|
CGLUnlockContext([[self openGLContext] CGLContextObj]);
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)lockFocus {
|
||||||
|
NSOpenGLContext *context = [self openGLContext];
|
||||||
|
[super lockFocus];
|
||||||
|
if ([context view] != self) {
|
||||||
|
[context setView:self];
|
||||||
|
}
|
||||||
|
[context makeCurrentContext];
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)prepareOpenGL {
|
||||||
|
[super prepareOpenGL];
|
||||||
|
[self ensureGLContext];
|
||||||
|
glDisable(GL_DITHER);
|
||||||
|
[self setupDisplayLink];
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)clearGLContext {
|
||||||
|
[self ensureGLContext];
|
||||||
|
self.i420TextureCache = nil;
|
||||||
|
[super clearGLContext];
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)drawRect:(NSRect)rect {
|
||||||
|
[self drawFrame];
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)drawFrame {
|
||||||
|
RTCVideoFrame *frame = self.videoFrame;
|
||||||
|
if (!frame || frame == _lastDrawnFrame) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// This method may be called from CVDisplayLink callback which isn't on the
|
||||||
|
// main thread so we have to lock the GL context before drawing.
|
||||||
|
NSOpenGLContext *context = [self openGLContext];
|
||||||
|
CGLLockContext([context CGLContextObj]);
|
||||||
|
|
||||||
|
[self ensureGLContext];
|
||||||
|
glClear(GL_COLOR_BUFFER_BIT);
|
||||||
|
|
||||||
|
|
||||||
|
// Rendering native CVPixelBuffer is not supported on OS X.
|
||||||
|
// TODO(magjed): Add support for NV12 texture cache on OS X.
|
||||||
|
frame = [frame newI420VideoFrame];
|
||||||
|
if (!self.i420TextureCache) {
|
||||||
|
self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context];
|
||||||
|
}
|
||||||
|
RTCVideoRotation rotation = frame.rotation;
|
||||||
|
|
||||||
|
RTCI420TextureCache *i420TextureCache = self.i420TextureCache;
|
||||||
|
if (i420TextureCache) {
|
||||||
|
[i420TextureCache uploadFrameToTextures:frame];
|
||||||
|
[_shader applyShadingForFrameWithWidth:frame.width
|
||||||
|
height:frame.height
|
||||||
|
rotation:rotation
|
||||||
|
yPlane:i420TextureCache.yTexture
|
||||||
|
uPlane:i420TextureCache.uTexture
|
||||||
|
vPlane:i420TextureCache.vTexture];
|
||||||
|
[context flushBuffer];
|
||||||
|
_lastDrawnFrame = frame;
|
||||||
|
}
|
||||||
|
CGLUnlockContext([context CGLContextObj]);
|
||||||
|
|
||||||
|
if (!_firstFrameReceivedReported && _onFirstFrameReceived) {
|
||||||
|
_firstFrameReceivedReported = true;
|
||||||
|
float aspectRatio = (float)frame.width / (float)frame.height;
|
||||||
|
dispatch_async(dispatch_get_main_queue(), ^{
|
||||||
|
self->_onFirstFrameReceived(aspectRatio);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
- (void)setupDisplayLink {
|
||||||
|
if (_displayLink) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// Synchronize buffer swaps with vertical refresh rate.
|
||||||
|
GLint swapInt = 1;
|
||||||
|
[[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
|
||||||
|
|
||||||
|
// Create display link.
|
||||||
|
CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
|
||||||
|
CVDisplayLinkSetOutputCallback(_displayLink,
|
||||||
|
&OnDisplayLinkFired,
|
||||||
|
(__bridge void *)self);
|
||||||
|
// Set the display link for the current renderer.
|
||||||
|
CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
|
||||||
|
CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
|
||||||
|
CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
|
||||||
|
_displayLink, cglContext, cglPixelFormat);
|
||||||
|
CVDisplayLinkStart(_displayLink);
|
||||||
|
}
|
||||||
|
|
||||||
|
-(void)setFrameOrigin:(NSPoint)newOrigin {
|
||||||
|
[super setFrameOrigin:newOrigin];
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)teardownDisplayLink {
|
||||||
|
if (!_displayLink) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
CVDisplayLinkRelease(_displayLink);
|
||||||
|
_displayLink = NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)ensureGLContext {
|
||||||
|
NSOpenGLContext* context = [self openGLContext];
|
||||||
|
NSAssert(context, @"context shouldn't be nil");
|
||||||
|
if ([NSOpenGLContext currentContext] != context) {
|
||||||
|
[context makeCurrentContext];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)dealloc {
|
||||||
|
[self teardownDisplayLink];
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived {
|
||||||
|
_onFirstFrameReceived = [onFirstFrameReceived copy];
|
||||||
|
_firstFrameReceivedReported = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@end
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@interface GLVideoView ()
|
||||||
|
@property(nonatomic, strong) OpenGLVideoView *glView;
|
||||||
|
@end
|
||||||
|
|
||||||
|
@implementation GLVideoView {
|
||||||
|
|
||||||
|
CGSize _currentSize;
|
||||||
|
|
||||||
|
std::shared_ptr<VideoRendererAdapterImpl> _sink;
|
||||||
|
|
||||||
|
void (^_onOrientationUpdated)(int);
|
||||||
|
void (^_onIsMirroredUpdated)(bool);
|
||||||
|
|
||||||
|
bool _didSetShouldBeMirrored;
|
||||||
|
bool _shouldBeMirrored;
|
||||||
|
bool _forceMirrored;
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@synthesize delegate = _delegate;
|
||||||
|
|
||||||
|
-(instancetype)initWithFrame:(NSRect)frameRect {
|
||||||
|
NSOpenGLPixelFormatAttribute attributes[] = {
|
||||||
|
NSOpenGLPFADoubleBuffer,
|
||||||
|
NSOpenGLPFADepthSize, 24,
|
||||||
|
NSOpenGLPFAOpenGLProfile,
|
||||||
|
NSOpenGLProfileVersion3_2Core,
|
||||||
|
0
|
||||||
|
};
|
||||||
|
NSOpenGLPixelFormat* pixelFormat =
|
||||||
|
[[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];
|
||||||
|
return [self initWithFrame:frameRect pixelFormat: pixelFormat];
|
||||||
|
}
|
||||||
|
|
||||||
|
- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format {
|
||||||
|
return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]];
|
||||||
|
}
|
||||||
|
|
||||||
|
- (instancetype)initWithFrame:(NSRect)frame
|
||||||
|
pixelFormat:(NSOpenGLPixelFormat *)format
|
||||||
|
shader:(id<RTCVideoViewShading>)shader {
|
||||||
|
if (self = [super initWithFrame:frame]) {
|
||||||
|
|
||||||
|
_glView = [[OpenGLVideoView alloc] initWithFrame:frame pixelFormat:format shader:shader];
|
||||||
|
_glView.wantsLayer = YES;
|
||||||
|
self.layerContentsRedrawPolicy = NSViewLayerContentsRedrawDuringViewResize;
|
||||||
|
_glView.layerContentsRedrawPolicy = NSViewLayerContentsRedrawDuringViewResize;
|
||||||
|
|
||||||
|
[self addSubview:_glView];
|
||||||
|
|
||||||
|
__weak GLVideoView *weakSelf = self;
|
||||||
|
|
||||||
|
self.wantsLayer = YES;
|
||||||
|
|
||||||
|
_sink.reset(new VideoRendererAdapterImpl(^(CGSize size, RTCVideoFrame *videoFrame, RTCVideoRotation rotation) {
|
||||||
|
dispatch_async(dispatch_get_main_queue(), ^{
|
||||||
|
__strong GLVideoView *strongSelf = weakSelf;
|
||||||
|
if (strongSelf == nil) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (!CGSizeEqualToSize(size, strongSelf->_currentSize)) {
|
||||||
|
strongSelf->_currentSize = size;
|
||||||
|
[strongSelf setSize:size];
|
||||||
|
}
|
||||||
|
|
||||||
|
int mappedValue = 0;
|
||||||
|
switch (rotation) {
|
||||||
|
case RTCVideoRotation_90:
|
||||||
|
mappedValue = 0;
|
||||||
|
break;
|
||||||
|
case RTCVideoRotation_180:
|
||||||
|
mappedValue = 1;
|
||||||
|
break;
|
||||||
|
case RTCVideoRotation_270:
|
||||||
|
mappedValue = 2;
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
mappedValue = 0;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
[strongSelf setInternalOrientation:mappedValue];
|
||||||
|
|
||||||
|
[strongSelf renderFrame:videoFrame];
|
||||||
|
});
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
return self;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
- (CALayerContentsGravity)videoContentMode {
|
||||||
|
return self.glView.layer.contentsGravity;
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)setVideoContentMode:(CALayerContentsGravity)mode {
|
||||||
|
self.glView.layer.contentsGravity = mode;
|
||||||
|
[self setNeedsLayout:YES];
|
||||||
|
}
|
||||||
|
|
||||||
|
-(void)layout {
|
||||||
|
[super layout];
|
||||||
|
|
||||||
|
if (self.bounds.size.width > 0.0f && _currentSize.width > 0) {
|
||||||
|
|
||||||
|
NSSize size = _currentSize;
|
||||||
|
NSSize frameSize = self.frame.size;
|
||||||
|
if ( self.glView.layer.contentsGravity == kCAGravityResizeAspectFill) {
|
||||||
|
size = aspectFitted(frameSize, _currentSize);
|
||||||
|
} else {
|
||||||
|
size = aspectFilled(frameSize, _currentSize);
|
||||||
|
}
|
||||||
|
_glView.frame = CGRectMake(floor((self.bounds.size.width - size.width) / 2.0), floor((self.bounds.size.height - size.height) / 2.0), size.width, size.height);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (_shouldBeMirrored || _forceMirrored) {
|
||||||
|
self.glView.layer.anchorPoint = NSMakePoint(1, 0);
|
||||||
|
self.glView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1);
|
||||||
|
} else {
|
||||||
|
self.glView.layer.anchorPoint = NSMakePoint(0, 0);
|
||||||
|
self.glView.layer.affineTransform = CGAffineTransformIdentity;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)setSize:(CGSize)size {
|
||||||
|
[self.delegate videoView:self didChangeVideoSize:size];
|
||||||
|
[self setNeedsLayout:YES];
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)renderFrame:(RTCVideoFrame *)videoFrame {
|
||||||
|
self.glView.videoFrame = videoFrame;
|
||||||
|
|
||||||
|
if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
|
||||||
|
RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
|
||||||
|
if ([buffer isKindOfClass:[TGRTCCVPixelBuffer class]]) {
|
||||||
|
bool shouldBeMirrored = ((TGRTCCVPixelBuffer *)buffer).shouldBeMirrored;
|
||||||
|
if (shouldBeMirrored != _shouldBeMirrored) {
|
||||||
|
_shouldBeMirrored = shouldBeMirrored;
|
||||||
|
if (shouldBeMirrored || _forceMirrored) {
|
||||||
|
self.glView.layer.anchorPoint = NSMakePoint(1, 0);
|
||||||
|
self.glView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1);
|
||||||
|
} else {
|
||||||
|
self.glView.layer.anchorPoint = NSMakePoint(0, 0);
|
||||||
|
self.glView.layer.affineTransform = CGAffineTransformIdentity;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (shouldBeMirrored != _shouldBeMirrored) {
|
||||||
|
if (_didSetShouldBeMirrored) {
|
||||||
|
if (_onIsMirroredUpdated) {
|
||||||
|
_onIsMirroredUpdated(_shouldBeMirrored);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
_didSetShouldBeMirrored = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#pragma mark - Private
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
- (std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)getSink {
|
||||||
|
assert([NSThread isMainThread]);
|
||||||
|
|
||||||
|
return _sink;
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived {
|
||||||
|
[self.glView setOnFirstFrameReceived:onFirstFrameReceived];
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)setInternalOrientation:(int)internalOrientation {
|
||||||
|
_internalOrientation = internalOrientation;
|
||||||
|
if (_onOrientationUpdated) {
|
||||||
|
_onOrientationUpdated(internalOrientation);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)internalSetOnOrientationUpdated:(void (^ _Nullable)(int))onOrientationUpdated {
|
||||||
|
_onOrientationUpdated = [onOrientationUpdated copy];
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)internalSetOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated {
|
||||||
|
}
|
||||||
|
|
||||||
|
- (void)setIsForceMirrored:(BOOL)forceMirrored {
|
||||||
|
_forceMirrored = forceMirrored;
|
||||||
|
[self setNeedsLayout:YES];
|
||||||
|
}
|
||||||
|
|
||||||
|
@end
|
||||||
|
|
||||||
|
#endif // !TARGET_OS_IPHONE
|
|
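The drawFrame()/display-link machinery above follows a latest-frame-wins pattern: the renderer keeps a single frame slot, and the vsync callback redraws only when the slot changed since the last draw. A simplified, portable C++ sketch of that shape (illustrative, not the view's actual code):

#include <atomic>
#include <cstdio>

struct Frame { int id; };

std::atomic<Frame *> latestFrame{nullptr};
Frame *lastDrawnFrame = nullptr;

void onVsync() {                   // stand-in for OnDisplayLinkFired -> drawFrame
    Frame *frame = latestFrame.load();
    if (!frame || frame == lastDrawnFrame) {
        return;                    // nothing new: skip the draw, as drawFrame does
    }
    std::printf("draw frame %d\n", frame->id);
    lastDrawnFrame = frame;
}

int main() {
    static Frame f1{1}, f2{2};
    latestFrame = &f1; onVsync();  // draws 1
    onVsync();                     // same frame: skipped
    latestFrame = &f2; onVsync();  // draws 2
}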
@@ -154,13 +154,18 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
 @interface VideoCameraCapturer () <AVCaptureVideoDataOutputSampleBufferDelegate> {
 	rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source;
+
+	// Live on main thread.
 	bool _isFrontCamera;
+
 	dispatch_queue_t _frameQueue;
+
+	// Live on RTCDispatcherTypeCaptureSession.
 	AVCaptureDevice *_currentDevice;
 	BOOL _hasRetriedOnFatalError;
 	BOOL _isRunning;
-	BOOL _willBeRunning;
+
+	// Live on RTCDispatcherTypeCaptureSession and main thread.
+	std::atomic<bool> _willBeRunning;

 	AVCaptureVideoDataOutput *_videoDataOutput;
 	AVCaptureSession *_captureSession;

@@ -170,16 +175,21 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
 	UIDeviceOrientation _orientation;
 	bool _rotationLock;
+
+	// Live on mainThread.
 	void (^_isActiveUpdated)(bool);
 	bool _isActiveValue;
 	bool _inForegroundValue;
-	bool _isPaused;
+
+	// Live on frameQueue and main thread.
+	std::atomic<bool> _isPaused;
+
+	// Live on frameQueue.
 	float _aspectRatio;
 	std::vector<uint8_t> _croppingBuffer;
 	std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _uncroppedSink;

-	int _warmupFrameCount;
+	// Live on frameQueue and RTCDispatcherTypeCaptureSession.
+	std::atomic<int> _warmupFrameCount;
 }

 @end

@@ -292,18 +302,22 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
 }

 - (void)setUncroppedSink:(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)sink {
+	dispatch_async(self.frameQueue, ^{
 	_uncroppedSink = sink;
+	});
 }

 - (void)setPreferredCaptureAspectRatio:(float)aspectRatio {
+	dispatch_async(self.frameQueue, ^{
 	_aspectRatio = aspectRatio;
+	});
 }

 - (void)startCaptureWithDevice:(AVCaptureDevice *)device
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps
             completionHandler:(nullable void (^)(NSError *))completionHandler {
-	_willBeRunning = YES;
+	_willBeRunning = true;
 	[RTCDispatcher
 	    dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
 	                  block:^{

@@ -323,7 +337,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
 	if (completionHandler) {
 		completionHandler(error);
 	}
-	_willBeRunning = NO;
+	_willBeRunning = false;
 	return;
 }
 [self reconfigureCaptureSessionInput];

@@ -340,7 +354,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
 }

 - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
-	_willBeRunning = NO;
+	_willBeRunning = false;
 	[RTCDispatcher
 	    dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
 	                  block:^{
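The BOOL-to-std::atomic changes above target flags written on one dispatch queue and read on another. A minimal standalone sketch of the same idea (illustrative, not the capturer itself):

#include <atomic>
#include <cstdio>
#include <thread>

int main() {
    std::atomic<bool> willBeRunning{false};

    std::thread captureSession([&] {  // stand-in for RTCDispatcherTypeCaptureSession
        willBeRunning = true;         // atomic store, visible to other threads
    });

    while (!willBeRunning.load()) {   // main-thread read: well-defined, no data race
        std::this_thread::yield();
    }
    std::printf("capture is starting\n");
    captureSession.join();
}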
@@ -20,7 +20,7 @@
 - (void)setIsEnabled:(bool)isEnabled;
 - (void)setPreferredCaptureAspectRatio:(float)aspectRatio;
 - (void)setUncroppedSink:(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)sink;
+- (BOOL)deviceIsCaptureCompitable:(AVCaptureDevice *)device;
+
 @end
 #endif //WEBRTC_MAC
@@ -157,9 +157,13 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {

 	dispatch_queue_t _frameQueue;
 	AVCaptureDevice *_currentDevice;
+
+	// Live on RTCDispatcherTypeCaptureSession.
 	BOOL _hasRetriedOnFatalError;
 	BOOL _isRunning;
-	BOOL _willBeRunning;
+
+	// Live on RTCDispatcherTypeCaptureSession and main thread.
+	std::atomic<bool> _willBeRunning;

 	AVCaptureVideoDataOutput *_videoDataOutput;
 	AVCaptureSession *_captureSession;

@@ -171,16 +175,22 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
 	FourCharCode _outputPixelFormat;
 	RTCVideoRotation _rotation;
+
+	// Live on mainThread.
 	void (^_isActiveUpdated)(bool);
 	bool _isActiveValue;
 	bool _inForegroundValue;
-	bool _isPaused;
-	int _skippedFrame;
+
+	// Live on frameQueue and main thread.
+	std::atomic<bool> _isPaused;
+	std::atomic<int> _skippedFrame;
+
+	// Live on frameQueue;
 	float _aspectRatio;
 	std::vector<uint8_t> _croppingBuffer;
 	std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _uncroppedSink;
+
+	int _warmupFrameCount;
 }

 @end

@@ -198,6 +208,8 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
 	_skippedFrame = 0;
 	_rotation = RTCVideoRotation_0;
+
+	_warmupFrameCount = 100;

 	if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) {
 		return nil;
 	}

@@ -211,7 +223,21 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
 }

 + (NSArray<AVCaptureDevice *> *)captureDevices {
-	return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
+	AVCaptureDevice * defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+	NSMutableArray<AVCaptureDevice *> * devices = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] mutableCopy];
+
+	[devices insertObject:defaultDevice atIndex:0];
+
+	return devices;
+}
+
+- (BOOL)deviceIsCaptureCompitable:(AVCaptureDevice *)device {
+	if (![device isConnected] || [device isSuspended]) {
+		return NO;
+	}
+	AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];
+
+	return [_captureSession canAddInput:input];
 }

 + (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {

@@ -264,11 +290,15 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {

 - (void)setUncroppedSink:(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)sink {
+	dispatch_async(self.frameQueue, ^{
 	_uncroppedSink = sink;
+	});
 }

 - (void)setPreferredCaptureAspectRatio:(float)aspectRatio {
-	_aspectRatio = aspectRatio;
+	dispatch_async(self.frameQueue, ^{
+		_aspectRatio = MAX(0.7, aspectRatio);
+	});
 }

 - (void)updateIsActiveValue {

@@ -286,7 +316,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps
             completionHandler:(nullable void (^)(NSError *))completionHandler {
-	_willBeRunning = YES;
+	_willBeRunning = true;
 	[RTCDispatcher
 	    dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
 	                  block:^{

@@ -302,7 +332,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
 	if (completionHandler) {
 		completionHandler(error);
 	}
-	self->_willBeRunning = NO;
+	self->_willBeRunning = false;
 	return;
 }
 [self reconfigureCaptureSessionInput];

@@ -318,7 +348,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
 }

 - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
-	_willBeRunning = NO;
+	_willBeRunning = false;
 	[RTCDispatcher
 	    dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
 	                  block:^{

@@ -344,6 +374,12 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
                fromConnection:(AVCaptureConnection *)connection {
 	NSParameterAssert(captureOutput == _videoDataOutput);

+	int minWarmupFrameCount = 12;
+	_warmupFrameCount++;
+	if (_warmupFrameCount < minWarmupFrameCount) {
+		return;
+	}
+
 	if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
 	    !CMSampleBufferDataIsReady(sampleBuffer)) {
 		return;

@@ -374,6 +410,8 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {

 	rtcPixelBuffer = [[TGRTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer adaptedWidth:width adaptedHeight:height cropWidth:width cropHeight:height cropX:left cropY:top];

+	rtcPixelBuffer.shouldBeMirrored = YES;
+
 	CVPixelBufferRef outputPixelBufferRef = NULL;
 	OSType pixelFormat = CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer);
 	CVPixelBufferCreate(NULL, width, height, pixelFormat, NULL, &outputPixelBufferRef);

@@ -384,6 +422,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
 }
 if ([rtcPixelBuffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:_croppingBuffer.data()]) {
|
if ([rtcPixelBuffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:_croppingBuffer.data()]) {
|
||||||
rtcPixelBuffer = [[TGRTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
|
rtcPixelBuffer = [[TGRTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
|
||||||
|
rtcPixelBuffer.shouldBeMirrored = YES;
|
||||||
}
|
}
|
||||||
CVPixelBufferRelease(outputPixelBufferRef);
|
CVPixelBufferRelease(outputPixelBufferRef);
|
||||||
}
|
}
|
||||||
|
@ -480,6 +519,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
|
||||||
if (!self->_hasRetriedOnFatalError) {
|
if (!self->_hasRetriedOnFatalError) {
|
||||||
RTCLogWarning(@"Attempting to recover from fatal capture error.");
|
RTCLogWarning(@"Attempting to recover from fatal capture error.");
|
||||||
[self handleNonFatalError];
|
[self handleNonFatalError];
|
||||||
|
self->_warmupFrameCount = 0;
|
||||||
self->_hasRetriedOnFatalError = YES;
|
self->_hasRetriedOnFatalError = YES;
|
||||||
} else {
|
} else {
|
||||||
RTCLogError(@"Previous fatal error recovery failed.");
|
RTCLogError(@"Previous fatal error recovery failed.");
|
||||||
|
@ -492,6 +532,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
|
||||||
block:^{
|
block:^{
|
||||||
RTCLog(@"Restarting capture session after error.");
|
RTCLog(@"Restarting capture session after error.");
|
||||||
if (self->_isRunning) {
|
if (self->_isRunning) {
|
||||||
|
self->_warmupFrameCount = 0;
|
||||||
[self->_captureSession startRunning];
|
[self->_captureSession startRunning];
|
||||||
}
|
}
|
||||||
}];
|
}];
|
||||||
|
@ -504,6 +545,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
|
||||||
block:^{
|
block:^{
|
||||||
if (self->_isRunning && !self->_captureSession.isRunning) {
|
if (self->_isRunning && !self->_captureSession.isRunning) {
|
||||||
RTCLog(@"Restarting capture session on active.");
|
RTCLog(@"Restarting capture session on active.");
|
||||||
|
self->_warmupFrameCount = 0;
|
||||||
[self->_captureSession startRunning];
|
[self->_captureSession startRunning];
|
||||||
}
|
}
|
||||||
}];
|
}];
|
||||||
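
Note on the capturer hunks above: the capturer now drops the first frames after the capture session starts or is restarted after an error, because `_warmupFrameCount` is reset to 0 on every recovery path and frames are ignored until it reaches 12, while the initial value of 100 lets a normal first start pass frames immediately. A minimal standalone sketch of the same gating pattern, written in Java with illustrative names that are not from this commit:

// Drop the first few frames after a capture session (re)start; early frames
// often carry bad exposure or stale state. Mirrors the diff's counters: the
// field starts "warm" at 100 so a fresh start is not gated, and reset() is
// called exactly where the diff sets _warmupFrameCount = 0.
final class FrameWarmupGate {
    private static final int MIN_WARMUP_FRAME_COUNT = 12;
    private int warmupFrameCount = 100;

    void reset() {                 // call when the session restarts after an error
        warmupFrameCount = 0;
    }

    boolean shouldDeliver() {      // call once per captured frame
        warmupFrameCount++;
        return warmupFrameCount >= MIN_WARMUP_FRAME_COUNT;
    }
}
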
@@ -16,7 +16,7 @@ namespace tgcalls {
 
 class VideoCapturerInterfaceImpl : public VideoCapturerInterface {
 public:
-    VideoCapturerInterfaceImpl(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated);
+    VideoCapturerInterfaceImpl(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::pair<int, int> &outResolution);
     ~VideoCapturerInterfaceImpl() override;
 
     void setState(VideoState state) override;
 
@@ -32,6 +32,28 @@
 
 #import "VideoCaptureInterface.h"
 
+@interface VideoCapturerInterfaceImplSourceDescription : NSObject
+
+@property (nonatomic, readonly) bool isFrontCamera;
+@property (nonatomic, strong, readonly, nonnull) AVCaptureDevice *device;
+@property (nonatomic, strong, readonly, nonnull) AVCaptureDeviceFormat *format;
+
+@end
+
+@implementation VideoCapturerInterfaceImplSourceDescription
+
+- (instancetype)initWithIsFrontCamera:(bool)isFrontCamera device:(AVCaptureDevice * _Nonnull)device format:(AVCaptureDeviceFormat * _Nonnull)format {
+    self = [super init];
+    if (self != nil) {
+        _isFrontCamera = isFrontCamera;
+        _device = device;
+        _format = format;
+    }
+    return self;
+}
+
+@end
+
 @interface VideoCapturerInterfaceImplReference : NSObject {
     VideoCameraCapturer *_videoCapturer;
 }
@@ -40,15 +62,7 @@
 
 @implementation VideoCapturerInterfaceImplReference
 
-- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source useFrontCamera:(bool)useFrontCamera isActiveUpdated:(void (^)(bool))isActiveUpdated {
-    self = [super init];
-    if (self != nil) {
-        assert([NSThread isMainThread]);
-#ifdef WEBRTC_IOS
-        _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:source useFrontCamera:useFrontCamera isActiveUpdated:isActiveUpdated];
-#else
-        _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:source isActiveUpdated:isActiveUpdated];
-#endif
++ (AVCaptureDevice *)selectCaptureDevice:(bool)useFrontCamera {
     AVCaptureDevice *selectedCamera = nil;
 
 #ifdef WEBRTC_IOS
@@ -67,13 +81,19 @@
             selectedCamera = backCamera;
         }
 #else
-    selectedCamera = [VideoCameraCapturer captureDevices].firstObject;
-#endif
-    // NSLog(@"%@", selectedCamera);
-    if (selectedCamera == nil) {
-        return nil;
+    NSArray<AVCaptureDevice *> *devices = [VideoCameraCapturer captureDevices];
+    for (int i = 0; i < devices.count; i++) {
+        if ([_videoCapturer deviceIsCaptureCompitable:devices[i]]) {
+            selectedCamera = devices[i];
+            break;
+        }
     }
+#endif
 
+    return selectedCamera;
+}
+
++ (AVCaptureDeviceFormat *)selectCaptureDeviceFormatForDevice:(AVCaptureDevice *)selectedCamera {
     NSArray<AVCaptureDeviceFormat *> *sortedFormats = [[VideoCameraCapturer supportedFormatsForDevice:selectedCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, AVCaptureDeviceFormat *rhs) {
         int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width;
         int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width;
@@ -123,7 +143,37 @@
         return nil;
     }
 
-    [_videoCapturer startCaptureWithDevice:selectedCamera format:bestFormat fps:30];
+    return bestFormat;
+}
+
++ (VideoCapturerInterfaceImplSourceDescription *)selectCapturerDescription:(bool)useFrontCamera {
+    AVCaptureDevice *selectedCamera = [VideoCapturerInterfaceImplReference selectCaptureDevice:useFrontCamera];
+
+    if (selectedCamera == nil) {
+        return nil;
+    }
+
+    AVCaptureDeviceFormat *bestFormat = [VideoCapturerInterfaceImplReference selectCaptureDeviceFormatForDevice:selectedCamera];
+
+    if (bestFormat == nil) {
+        return nil;
+    }
+
+    return [[VideoCapturerInterfaceImplSourceDescription alloc] initWithIsFrontCamera:useFrontCamera device:selectedCamera format:bestFormat];
+}
+
+- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source sourceDescription:(VideoCapturerInterfaceImplSourceDescription *)sourceDescription isActiveUpdated:(void (^)(bool))isActiveUpdated {
+    self = [super init];
+    if (self != nil) {
+        assert([NSThread isMainThread]);
+
+#ifdef WEBRTC_IOS
+        _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:source useFrontCamera:sourceDescription.isFrontCamera isActiveUpdated:isActiveUpdated];
+#else
+        _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:source isActiveUpdated:isActiveUpdated];
+#endif
+
+        [_videoCapturer startCaptureWithDevice:sourceDescription.device format:sourceDescription.format fps:30];
     }
     return self;
 }
@@ -154,12 +204,23 @@
 
 namespace tgcalls {
 
-VideoCapturerInterfaceImpl::VideoCapturerInterfaceImpl(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated) :
+VideoCapturerInterfaceImpl::VideoCapturerInterfaceImpl(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::pair<int, int> &outResolution) :
 _source(source) {
+    VideoCapturerInterfaceImplSourceDescription *sourceDescription = [VideoCapturerInterfaceImplReference selectCapturerDescription:useFrontCamera];
+
+    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(sourceDescription.format.formatDescription);
+#ifdef WEBRTC_IOS
+    outResolution.first = dimensions.height;
+    outResolution.second = dimensions.width;
+#else
+    outResolution.first = dimensions.width;
+    outResolution.second = dimensions.height;
+#endif
+
     _implReference = [[VideoCapturerInterfaceImplHolder alloc] init];
     VideoCapturerInterfaceImplHolder *implReference = _implReference;
     dispatch_async(dispatch_get_main_queue(), ^{
-        VideoCapturerInterfaceImplReference *value = [[VideoCapturerInterfaceImplReference alloc] initWithSource:source useFrontCamera:useFrontCamera isActiveUpdated:^(bool isActive) {
+        VideoCapturerInterfaceImplReference *value = [[VideoCapturerInterfaceImplReference alloc] initWithSource:source sourceDescription:sourceDescription isActiveUpdated:^(bool isActive) {
             stateUpdated(isActive ? VideoState::Active : VideoState::Paused);
         }];
         if (value != nil) {
 
@@ -29,7 +29,7 @@
 - (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived;
 - (void)internalSetOnOrientationUpdated:(void (^ _Nullable)(int))onOrientationUpdated;
 - (void)internalSetOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated;
+- (void)setIsForceMirrored:(BOOL)forceMirrored;
 @end
 
 #endif // WEBRTC_MAC
 
@@ -19,13 +19,9 @@
 #import "api/media_stream_interface.h"
 
 #import "RTCMTLI420Renderer.h"
-#import "RTCMTLNV12Renderer.h"
-#import "RTCMTLRGBRenderer.h"
 
 #define MTKViewClass NSClassFromString(@"MTKView")
-#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
 #define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
-#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
 
 namespace {
 
@@ -67,7 +63,7 @@ private:
 
 @interface VideoMetalView () <MTKViewDelegate> {
     RTCMTLI420Renderer *_rendererI420;
-    RTCMTLNV12Renderer *_rendererNV12;
     MTKView *_metalView;
     RTCVideoFrame *_videoFrame;
     CGSize _videoFrameSize;
@@ -83,7 +79,7 @@ private:
 
     bool _didSetShouldBeMirrored;
     bool _shouldBeMirrored;
+    bool _forceMirrored;
 }
 
 @end
@@ -91,7 +87,7 @@ private:
 @implementation VideoMetalView
 
 + (bool)isSupported {
-    return YES;
+    return [VideoMetalView isMetalAvailable];
 }
 
 - (instancetype)initWithFrame:(CGRect)frameRect {
@@ -164,10 +160,6 @@ private:
     return [[MTKViewClass alloc] initWithFrame:frame];
 }
 
-+ (RTCMTLNV12Renderer *)createNV12Renderer {
-    return [[RTCMTLNV12RendererClass alloc] init];
-}
-
 + (RTCMTLI420Renderer *)createI420Renderer {
     return [[RTCMTLI420RendererClass alloc] init];
 }
@@ -192,7 +184,7 @@ private:
 - (void)layout {
     [super layout];
 
-    if (_shouldBeMirrored) {
+    if (_shouldBeMirrored || _forceMirrored) {
         _metalView.layer.anchorPoint = NSMakePoint(1, 0);
         _metalView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1);
         // _metalView.layer.transform = CATransform3DMakeScale(-1, 1, 1);
@@ -232,8 +224,11 @@ private:
 
     if ([buffer isKindOfClass:[TGRTCCVPixelBuffer class]]) {
         bool shouldBeMirrored = ((TGRTCCVPixelBuffer *)buffer).shouldBeMirrored;
+        if (shouldBeMirrored != _shouldBeMirrored) {
+            _shouldBeMirrored = shouldBeMirrored;
+            bool shouldBeMirrored = ((TGRTCCVPixelBuffer *)buffer).shouldBeMirrored;
 
-        if (shouldBeMirrored) {
+        if (shouldBeMirrored || _forceMirrored) {
             _metalView.layer.anchorPoint = NSMakePoint(1, 0);
             _metalView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1);
             // _metalView.layer.transform = CATransform3DMakeScale(-1, 1, 1);
@@ -243,10 +238,6 @@ private:
             //_metalView.layer.transform = CATransform3DIdentity;
         }
 
-        if (shouldBeMirrored != _shouldBeMirrored) {
-            _shouldBeMirrored = shouldBeMirrored;
-
-
         if (_didSetShouldBeMirrored) {
             if (_onIsMirroredUpdated) {
                 _onIsMirroredUpdated(_shouldBeMirrored);
@@ -256,7 +247,7 @@ private:
             }
         }
     }
+    }
     if (!_rendererI420) {
         _rendererI420 = [VideoMetalView createI420Renderer];
         if (![_rendererI420 addRenderingDestination:_metalView]) {
@@ -266,21 +257,15 @@ private:
         }
     }
     renderer = _rendererI420;
-    }
-
-    if (!_firstFrameReceivedReported && _onFirstFrameReceived) {
-        _firstFrameReceivedReported = true;
-        _onFirstFrameReceived((float)videoFrame.width / (float)videoFrame.height);
-    }
-
-
-    renderer = _rendererI420;
-
     renderer.rotationOverride = _rotationOverride;
     [renderer drawFrame:videoFrame];
     _lastFrameTimeNs = videoFrame.timeStampNs;
 
+    if (!_firstFrameReceivedReported && _onFirstFrameReceived) {
+        _firstFrameReceivedReported = true;
+        _onFirstFrameReceived((float)videoFrame.width / (float)videoFrame.height);
+    }
 }
 
 - (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
@@ -346,11 +331,15 @@ private:
         return;
     }
 
     if (frame == nil) {
         RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
         return;
     }
     _videoFrame = frame;
 
 }
@@ -379,4 +368,10 @@ private:
     _onIsMirroredUpdated = [onIsMirroredUpdated copy];
 }
 
+- (void)setIsForceMirrored:(BOOL)forceMirrored {
+    _forceMirrored = forceMirrored;
+    [self setNeedsLayout:YES];
+}
+
 @end
 
@@ -1,4 +1,4 @@
-#include "WindowsInterface.h"
+#include "DesktopInterface.h"
 
 #include "platform/tdesktop/VideoCapturerInterfaceImpl.h"
 #include "platform/tdesktop/VideoCapturerTrackSource.h"
@@ -9,30 +9,32 @@
 
 namespace tgcalls {
 
-std::unique_ptr<webrtc::VideoEncoderFactory> WindowsInterface::makeVideoEncoderFactory() {
+std::unique_ptr<webrtc::VideoEncoderFactory> DesktopInterface::makeVideoEncoderFactory() {
     return webrtc::CreateBuiltinVideoEncoderFactory();
 }
 
-std::unique_ptr<webrtc::VideoDecoderFactory> WindowsInterface::makeVideoDecoderFactory() {
+std::unique_ptr<webrtc::VideoDecoderFactory> DesktopInterface::makeVideoDecoderFactory() {
     return webrtc::CreateBuiltinVideoDecoderFactory();
 }
 
-rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> WindowsInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) {
+rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> DesktopInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) {
     const auto videoTrackSource = VideoCapturerTrackSource::Create();
-    return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, videoTrackSource);
+    return videoTrackSource
+        ? webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, videoTrackSource)
+        : nullptr;
 }
 
-bool WindowsInterface::supportsEncoding(const std::string &codecName) {
+bool DesktopInterface::supportsEncoding(const std::string &codecName) {
     return (codecName == cricket::kH264CodecName)
         || (codecName == cricket::kVp8CodecName);
 }
 
-std::unique_ptr<VideoCapturerInterface> WindowsInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) {
+std::unique_ptr<VideoCapturerInterface> DesktopInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) {
     return std::make_unique<VideoCapturerInterfaceImpl>(source, useFrontCamera, stateUpdated);
 }
 
 std::unique_ptr<PlatformInterface> CreatePlatformInterface() {
-    return std::make_unique<WindowsInterface>();
+    return std::make_unique<DesktopInterface>();
 }
 
 } // namespace tgcalls
 
@@ -1,12 +1,12 @@
-#ifndef TGCALLS_WINDOWS_INTERFACE_H
-#define TGCALLS_WINDOWS_INTERFACE_H
+#ifndef TGCALLS_DESKTOP_INTERFACE_H
+#define TGCALLS_DESKTOP_INTERFACE_H
 
 #include "platform/PlatformInterface.h"
 #include "VideoCapturerInterface.h"
 
 namespace tgcalls {
 
-class WindowsInterface : public PlatformInterface {
+class DesktopInterface : public PlatformInterface {
 public:
     std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory() override;
     std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory() override;
@@ -18,4 +18,4 @@ public:
 
 } // namespace tgcalls
 
-#endif
+#endif // TGCALLS_DESKTOP_INTERFACE_H
 
@@ -325,7 +325,6 @@ public:
     dependencies.event_log_factory =
         std::make_unique<webrtc::RtcEventLogFactory>(dependencies.task_queue_factory.get());
     dependencies.network_controller_factory = nullptr;
-    //dependencies.media_transport_factory = nullptr;
 
     _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies));
 
@@ -1002,8 +1001,10 @@ int InstanceImplReference::GetConnectionMaxLayer() {
     return 92;
 }
 
-std::string InstanceImplReference::GetVersion() {
-    return "2.8.8";
+std::vector<std::string> InstanceImplReference::GetVersions() {
+    std::vector<std::string> result;
+    result.push_back("2.8.8");
+    return result;
 }
 
 std::string InstanceImplReference::getLastError() {
@@ -1027,7 +1028,7 @@ PersistentState InstanceImplReference::getPersistentState() {
     return PersistentState();
 }
 
-FinalState InstanceImplReference::stop() {
+void InstanceImplReference::stop(std::function<void(FinalState)> completion) {
     auto result = FinalState();
 
     result.persistentState = getPersistentState();
@@ -1035,7 +1036,7 @@ FinalState InstanceImplReference::stop() {
     result.trafficStats = getTrafficStats();
     result.isRatingSuggested = false;
 
-    return result;
+    completion(result);
 }
 
 template <>
 
@@ -28,13 +28,13 @@ public:
     void setAudioOutputDuckingEnabled(bool enabled) override;
     void setIsLowBatteryLevel(bool isLowBatteryLevel) override;
     static int GetConnectionMaxLayer();
-    static std::string GetVersion();
+    static std::vector<std::string> GetVersions();
     std::string getLastError() override;
     std::string getDebugInfo() override;
    int64_t getPreferredRelayId() override;
    TrafficStats getTrafficStats() override;
    PersistentState getPersistentState() override;
-    FinalState stop() override;
+    void stop(std::function<void(FinalState)> completion) override;
 
 private:
     std::unique_ptr<LogSinkImpl> logSink_;
 
@@ -19,7 +19,7 @@ public class BuildVars {
     public static boolean USE_CLOUD_STRINGS = true;
     public static boolean CHECK_UPDATES = true;
     public static boolean TON_WALLET_STANDALONE = false;
-    public static int BUILD_VERSION = 2061;
+    public static int BUILD_VERSION = 2064;
     public static String BUILD_VERSION_STRING = "7.0.0";
     public static int APP_ID = 4;
     public static String APP_HASH = "014b35b6184100b085b0d0572f9b5103";
 
@@ -288,6 +288,8 @@ public class MessagesController extends BaseController implements NotificationCe
     private SharedPreferences mainPreferences;
     private SharedPreferences emojiPreferences;
 
+    public volatile boolean ignoreSetOnline;
+
     public static class FaqSearchResult {
 
         public String title;
@@ -2064,6 +2066,7 @@ public class MessagesController extends BaseController implements NotificationCe
         suggestedFilters.clear();
         gettingAppChangelog = false;
         dialogFiltersLoaded = false;
+        ignoreSetOnline = false;
 
         Utilities.stageQueue.postRunnable(() -> {
             readTasks.clear();
@@ -4343,7 +4346,7 @@ public class MessagesController extends BaseController implements NotificationCe
         checkReadTasks();
 
         if (getUserConfig().isClientActivated()) {
-            if (getConnectionsManager().getPauseTime() == 0 && ApplicationLoader.isScreenOn && !ApplicationLoader.mainInterfacePausedStageQueue) {
+            if (!ignoreSetOnline && getConnectionsManager().getPauseTime() == 0 && ApplicationLoader.isScreenOn && !ApplicationLoader.mainInterfacePausedStageQueue) {
                 if (ApplicationLoader.mainInterfacePausedStageQueueTime != 0 && Math.abs(ApplicationLoader.mainInterfacePausedStageQueueTime - System.currentTimeMillis()) > 1000) {
                     if (statusSettingState != 1 && (lastStatusUpdateTime == 0 || Math.abs(System.currentTimeMillis() - lastStatusUpdateTime) >= 55000 || offlineSent)) {
                         statusSettingState = 1;
@@ -11785,6 +11788,9 @@ public class MessagesController extends BaseController implements NotificationCe
             } else {
                 ApplicationLoader.applicationContext.startService(intent);
             }
+            if (ApplicationLoader.mainInterfacePaused || !ApplicationLoader.isScreenOn) {
+                ignoreSetOnline = true;
+            }
         } catch (Throwable e) {
             FileLog.e(e);
         }
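
Note on the MessagesController.java hunks above: `ignoreSetOnline` is a volatile flag set when the VoIP service is started while the interface is paused or the screen is off, and it suppresses the periodic online-status update until it is cleared again (on reset, on accepting the call, or when the UI pauses with no active call, as later hunks show). A compact illustration of the same gating idea, with hypothetical names that are not from this commit:

// Illustrative only -- not the real MessagesController. A volatile flag is
// sufficient because it is written and read by different threads but never
// needs a compound read-modify-write.
class OnlineStatusGate {
    volatile boolean ignoreSetOnline;

    void onCallStartedInBackground() { ignoreSetOnline = true;  }
    void onCallAcceptedOrUiResumed() { ignoreSetOnline = false; }

    boolean mayUpdateStatus(boolean screenOn, boolean interfacePaused) {
        // same shape as the guard added to the diff's status-update check
        return !ignoreSetOnline && screenOn && !interfacePaused;
    }
}
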
@@ -5,6 +5,8 @@ import org.telegram.messenger.BuildVars;
 import org.telegram.messenger.FileLog;
 import org.webrtc.VideoSink;
 
+import java.util.concurrent.CountDownLatch;
+
 public class NativeInstance {
 
     private Instance.OnStateUpdatedListener onStateUpdatedListener;
@@ -69,6 +71,27 @@ public class NativeInstance {
         }
     }
 
+
+    private Instance.FinalState finalState;
+    private CountDownLatch stopBarrier;
+    private void onStop(Instance.FinalState state) {
+        finalState = state;
+        if (stopBarrier != null) {
+            stopBarrier.countDown();
+        }
+    }
+
+    public Instance.FinalState stop() {
+        stopBarrier = new CountDownLatch(1);
+        stopNative();
+        try {
+            stopBarrier.await();
+        } catch (Exception e) {
+            FileLog.e(e);
+        }
+        return finalState;
+    }
+
     private static native long makeNativeInstance(String version, NativeInstance instance, Instance.Config config, String persistentStateFilePath, Instance.Endpoint[] endpoints, Instance.Proxy proxy, int networkType, Instance.EncryptionKey encryptionKey, VideoSink remoteSink, long videoCapturer, float aspectRatio);
     public static native long createVideoCapturer(VideoSink localSink);
     public static native void setVideoStateCapturer(long videoCapturer, int videoState);
@@ -87,7 +110,7 @@ public class NativeInstance {
     public native long getPreferredRelayId();
     public native Instance.TrafficStats getTrafficStats();
     public native byte[] getPersistentState();
-    public native Instance.FinalState stop();
+    private native void stopNative();
     public native void setupOutgoingVideo(VideoSink localSink);
     public native void switchCamera();
     public native void setVideoState(int videoState);
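
Note on the NativeInstance.java hunks above: native `stop()` has become asynchronous (the C++ side now takes a completion callback, as the InstanceImplReference hunks show), so the Java class preserves its synchronous contract by blocking on a CountDownLatch until JNI calls `onStop`. A self-contained sketch of that bridge, with `Object` standing in for `Instance.FinalState` and a stub in place of the native method:

import java.util.concurrent.CountDownLatch;

final class StopBridge {
    private Object finalState;          // Instance.FinalState in the real class
    private CountDownLatch stopBarrier;

    // Called from native code once the instance has fully stopped.
    private void onStop(Object state) {
        finalState = state;
        if (stopBarrier != null) {
            stopBarrier.countDown();
        }
    }

    public Object stop() {
        stopBarrier = new CountDownLatch(1);
        stopNative();                   // kicks off the asynchronous teardown
        try {
            stopBarrier.await();        // park until onStop() fires
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        return finalState;
    }

    private void stopNative() { onStop(new Object()); } // native in the real class
}
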
@@ -14,10 +14,7 @@ import org.webrtc.CameraEnumerator;
 import org.webrtc.CameraVideoCapturer;
 import org.webrtc.CapturerObserver;
 import org.webrtc.EglBase;
-import org.webrtc.NativeAndroidVideoTrackSource;
-import org.webrtc.NativeCapturerObserver;
 import org.webrtc.SurfaceTextureHelper;
-import org.webrtc.VideoSource;
 
 @TargetApi(18)
 public class VideoCameraCapturer {
 
@@ -62,6 +62,7 @@ import android.view.View;
 import android.widget.RemoteViews;
 
 import org.telegram.messenger.AndroidUtilities;
+import org.telegram.messenger.ApplicationLoader;
 import org.telegram.messenger.BuildConfig;
 import org.telegram.messenger.BuildVars;
 import org.telegram.messenger.ContactsController;
@@ -631,6 +632,9 @@ public abstract class VoIPBaseService extends Service implements SensorEventList
         }
         stopForeground(true);
         stopRinging();
+        if (ApplicationLoader.mainInterfacePaused || !ApplicationLoader.isScreenOn) {
+            MessagesController.getInstance(currentAccount).ignoreSetOnline = false;
+        }
         NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.appDidLogout);
         SensorManager sm = (SensorManager) getSystemService(SENSOR_SERVICE);
         Sensor proximity = sm.getDefaultSensor(Sensor.TYPE_PROXIMITY);
@@ -649,10 +653,11 @@ public abstract class VoIPBaseService extends Service implements SensorEventList
         sharedInstance = null;
         AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didEndCall));
         if (tgVoip != null) {
-            updateTrafficStats();
             StatsController.getInstance(currentAccount).incrementTotalCallsTime(getStatsNetworkType(), (int) (getCallDuration() / 1000) % 5);
             onTgVoipPreStop();
-            onTgVoipStop(tgVoip.stop());
+            Instance.FinalState state = tgVoip.stop();
+            updateTrafficStats(state.trafficStats);
+            onTgVoipStop(state);
             prevTrafficStats = null;
             callStartTime = 0;
             tgVoip = null;
@@ -802,8 +807,10 @@ public abstract class VoIPBaseService extends Service implements SensorEventList
         }
     }
 
-    protected void updateTrafficStats() {
-        final Instance.TrafficStats trafficStats = tgVoip.getTrafficStats();
+    protected void updateTrafficStats(Instance.TrafficStats trafficStats) {
+        if (trafficStats == null) {
+            trafficStats = tgVoip.getTrafficStats();
+        }
         final long wifiSentDiff = trafficStats.bytesSentWifi - (prevTrafficStats != null ? prevTrafficStats.bytesSentWifi : 0);
         final long wifiRecvdDiff = trafficStats.bytesReceivedWifi - (prevTrafficStats != null ? prevTrafficStats.bytesReceivedWifi : 0);
         final long mobileSentDiff = trafficStats.bytesSentMobile - (prevTrafficStats != null ? prevTrafficStats.bytesSentMobile : 0);
@@ -1596,7 +1603,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList
         }
     }
 
-    public class SharedUIParams {
+    public static class SharedUIParams {
         public boolean tapToVideoTooltipWasShowed;
         public boolean cameraAlertWasShowed;
         public boolean wasVideoCall;
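
Note on the VoIPBaseService.java hunks above: `updateTrafficStats` now takes an optional snapshot. During the call the periodic updater passes `null` and the method queries the live instance; at hang-up the stats captured inside `Instance.FinalState` are passed instead, since by then the native instance is being torn down. A reduced sketch of the pattern, with stand-in types (only the delta bookkeeping shown in the hunk is reproduced; the rest is assumed):

final class CallTrafficAccounting {
    static final class TrafficStats {   // stand-in for Instance.TrafficStats
        long bytesSentWifi, bytesReceivedWifi, bytesSentMobile, bytesReceivedMobile;
    }

    private TrafficStats prevTrafficStats;

    void updateTrafficStats(TrafficStats trafficStats) {
        if (trafficStats == null) {
            trafficStats = queryLiveInstance(); // only valid while the call runs
        }
        long wifiSentDiff = trafficStats.bytesSentWifi
                - (prevTrafficStats != null ? prevTrafficStats.bytesSentWifi : 0);
        // ...the other three counters are diffed the same way, then reported...
        prevTrafficStats = trafficStats;
    }

    private TrafficStats queryLiveInstance() { return new TrafficStats(); } // stub
}
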
@@ -537,6 +537,7 @@ public class VoIPService extends VoIPBaseService {
     }
 
     public void acceptIncomingCall() {
+        MessagesController.getInstance(currentAccount).ignoreSetOnline = false;
         stopRinging();
         showNotification();
         configureDeviceForCall();
@@ -1141,7 +1142,7 @@ public class VoIPService extends VoIPBaseService {
         @Override
         public void run() {
             if (tgVoip != null) {
-                updateTrafficStats();
+                updateTrafficStats(null);
                 AndroidUtilities.runOnUIThread(this, 5000);
             }
         }
 
@@ -151,6 +151,8 @@ public class ChartHeaderView extends FrameLayout {
         back.setAlpha(1f);
         back.setTranslationX(0);
         back.setTranslationY(0);
+        back.setScaleX(1f);
+        back.setScaleY(1f);
         title.setAlpha(0f);
     }
 }
 
@@ -8798,7 +8798,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not
         }
         updateChatListViewTopPadding();
 
-        if (!firstLoading && !paused && !inPreviewMode && !inScheduleMode) {
+        if (!firstLoading && !paused && !inPreviewMode && !inScheduleMode && !getMessagesController().ignoreSetOnline) {
             int scheduledRead = 0;
             if ((maxPositiveUnreadId != Integer.MIN_VALUE || maxNegativeUnreadId != Integer.MAX_VALUE)) {
                 int counterDecrement = 0;
 
@@ -27,8 +27,6 @@ import androidx.annotation.NonNull;
 import androidx.core.content.ContextCompat;
 import androidx.core.graphics.ColorUtils;
 
-import com.google.android.exoplayer2.util.Log;
-
 import org.telegram.messenger.AndroidUtilities;
 import org.telegram.messenger.R;
 import org.telegram.ui.Components.CubicBezierInterpolator;
@@ -201,7 +201,8 @@ public class VoIPPiPView implements VoIPBaseService.StateListener {
         windowLayoutParams.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE |
                 WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED |
                 WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS |
-                WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN;
+                WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN |
+                WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON;
 
         return windowLayoutParams;
     }
 
@@ -12,6 +12,7 @@ import android.graphics.PorterDuff;
 import android.graphics.PorterDuffXfermode;
 import android.graphics.RectF;
 import android.os.Build;
+import android.view.Gravity;
 import android.view.View;
 import android.view.ViewOutlineProvider;
 import android.widget.FrameLayout;
@@ -19,11 +20,10 @@ import android.widget.ImageView;
 
 import androidx.annotation.NonNull;
 
-import com.google.android.exoplayer2.util.Log;
-
 import org.telegram.messenger.ApplicationLoader;
 import org.telegram.messenger.Utilities;
-import org.telegram.ui.Components.BackupImageView;
+import org.telegram.ui.Components.LayoutHelper;
+import org.webrtc.RendererCommon;
 import org.webrtc.TextureViewRenderer;
 
 import java.io.File;
@@ -41,6 +41,7 @@ public class VoIPTextureView extends FrameLayout {
 
     public final TextureViewRenderer renderer;
     public final ImageView imageView;
+    public View backgroundView;
 
     public Bitmap cameraLastBitmap;
     public float stubVisibleProgress = 1f;
@@ -55,10 +56,23 @@ public class VoIPTextureView extends FrameLayout {
                 super.onFirstFrameRendered();
                 VoIPTextureView.this.invalidate();
             }
 
+            @Override
+            protected void onMeasure(int widthSpec, int heightSpec) {
+                super.onMeasure(widthSpec, heightSpec);
+            }
         };
         renderer.setEnableHardwareScaler(true);
+        renderer.setIsCamera(isCamera);
+        if (!isCamera) {
+            backgroundView = new View(context);
+            backgroundView.setBackgroundColor(0xff1b1f23);
+            addView(backgroundView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT));
+            renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
+            addView(renderer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER));
+        } else {
             addView(renderer);
+        }
         addView(imageView);
 
         if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
 
@@ -3,13 +3,11 @@ package org.telegram.ui.Components.voip;
 import android.animation.Animator;
 import android.animation.AnimatorListenerAdapter;
 import android.app.Activity;
-import android.app.KeyguardManager;
 import android.content.Context;
 import android.content.pm.ActivityInfo;
 import android.graphics.PixelFormat;
 import android.os.Build;
 import android.view.Gravity;
-import android.view.KeyEvent;
 import android.view.MotionEvent;
 import android.view.VelocityTracker;
 import android.view.View;
@@ -134,7 +132,7 @@ public class VoIPWindowView extends FrameLayout {
                 try {
                     WindowManager wm = (WindowManager) activity.getSystemService(Context.WINDOW_SERVICE);
                     wm.removeView(VoIPWindowView.this);
-                } catch (Exception e) {
+                } catch (Exception ignore) {
 
                 }
             } else {
@@ -150,7 +148,7 @@ public class VoIPWindowView extends FrameLayout {
             setVisibility(View.GONE);
             try {
                 wm.removeView(VoIPWindowView.this);
-            } catch (Exception e) {
+            } catch (Exception ignore) {
 
             }
         }
 
@@ -82,6 +82,7 @@ import org.telegram.messenger.Utilities;
 import org.telegram.messenger.browser.Browser;
 import org.telegram.messenger.camera.CameraController;
 import org.telegram.messenger.voip.VoIPPendingCall;
+import org.telegram.messenger.voip.VoIPService;
 import org.telegram.tgnet.ConnectionsManager;
 import org.telegram.tgnet.TLRPC;
 import org.telegram.ui.ActionBar.ActionBarLayout;
@@ -111,7 +112,6 @@ import org.telegram.ui.Components.StickersAlert;
 import org.telegram.ui.Components.TermsOfServiceView;
 import org.telegram.ui.Components.ThemeEditorView;
 import org.telegram.ui.Components.UpdateAppAlertDialog;
-import org.telegram.ui.Components.voip.VoIPPiPView;
 
 import java.io.File;
 import java.util.ArrayList;
@@ -1001,13 +1001,17 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa
         int flags = intent.getFlags();
         final int[] intentAccount = new int[]{intent.getIntExtra("currentAccount", UserConfig.selectedAccount)};
         switchToAccount(intentAccount[0], true);
+        boolean isVoipIntent = intent.getAction() != null && intent.getAction().equals("voip");
         if (!fromPassword && (AndroidUtilities.needShowPasscode(true) || SharedConfig.isWaitingForPasscodeEnter)) {
             showPasscodeActivity();
+            UserConfig.getInstance(currentAccount).saveConfig(false);
+            if (!isVoipIntent) {
                 passcodeSaveIntent = intent;
                 passcodeSaveIntentIsNew = isNew;
                 passcodeSaveIntentIsRestore = restore;
-            UserConfig.getInstance(currentAccount).saveConfig(false);
-        } else {
+                return false;
+            }
+        }
             boolean pushOpened = false;
 
             int push_user_id = 0;
@@ -1884,18 +1888,13 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa
             }
         }
 
-            if (intent.getAction() != null && intent.getAction().equals("voip")) {
+            if (isVoipIntent) {
                 VoIPFragment.show(this);
-
-                //Intent i = new Intent(this, VoIPActivity.class).addFlags(Intent.FLAG_ACTIVITY_NEW_TASK) ;
-                // startActivity(i);
             }
 
             intent.setAction(null);
             return pushOpened;
         }
-        return false;
-    }
 
     private void runLinkRequest(final int intentAccount,
                                 final String username,
@@ -2934,9 +2933,13 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa
         super.onPause();
         NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopAllHeavyOperations, 4096);
         ApplicationLoader.mainInterfacePaused = true;
+        int account = currentAccount;
         Utilities.stageQueue.postRunnable(() -> {
            ApplicationLoader.mainInterfacePausedStageQueue = true;
            ApplicationLoader.mainInterfacePausedStageQueueTime = 0;
+            if (VoIPService.getSharedInstance() == null) {
+                MessagesController.getInstance(account).ignoreSetOnline = false;
+            }
         });
         onPasscodePause();
         actionBarLayout.onPause();
@@ -3069,7 +3072,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa
         checkAppUpdate(false);
 
         if (VoIPFragment.getInstance() != null) {
-            VoIPFragment.getInstance().onResume();
+            VoIPFragment.onResume();
         }
     }
 
@@ -1433,6 +1433,9 @@ public class StatisticActivity extends BaseFragment implements NotificationCente
         params.date = d;
 
         int dateIndex = Arrays.binarySearch(data.chartData.x, d);
+        if (dateIndex < 0) {
+            dateIndex = data.chartData.x.length - 1;
+        }
         params.xPercentage = data.chartData.xPercentage[dateIndex];
 
 
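
Note on the StatisticActivity.java fix above: `Arrays.binarySearch` returns `-(insertionPoint) - 1` when the exact key is absent, so using the raw result as an index can throw `ArrayIndexOutOfBoundsException`; the added guard falls back to the last data point instead. A runnable illustration:

import java.util.Arrays;

class BinarySearchFallbackDemo {
    public static void main(String[] args) {
        long[] x = {10L, 20L, 30L};
        int dateIndex = Arrays.binarySearch(x, 25L); // -3: 25 would insert at index 2
        if (dateIndex < 0) {
            dateIndex = x.length - 1;  // clamp to the last point, as the diff does
        }
        System.out.println(dateIndex); // prints 2, safe to use as an index
    }
}
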
|
|
|
@@ -571,8 +571,13 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
         callingUserMiniFloatingLayout.setFloatingMode(true, false);
         callingUserMiniTextureRenderer = new TextureViewRenderer(context);
         callingUserMiniTextureRenderer.setEnableHardwareScaler(true);
+        callingUserMiniTextureRenderer.setIsCamera(false);
+        callingUserMiniTextureRenderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
 
-        callingUserMiniFloatingLayout.addView(callingUserMiniTextureRenderer);
+        View backgroundView = new View(context);
+        backgroundView.setBackgroundColor(0xff1b1f23);
+        callingUserMiniFloatingLayout.addView(backgroundView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT));
+        callingUserMiniFloatingLayout.addView(callingUserMiniTextureRenderer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER));
         callingUserMiniFloatingLayout.setOnTapListener(view -> {
             if (cameraForceExpanded && System.currentTimeMillis() - lastContentTapTime > 500) {
                 AndroidUtilities.cancelRunOnUIThread(hideUIRunnable);
@@ -622,7 +627,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
         });
 
         emojiRationalTextView = new TextView(context);
-        emojiRationalTextView.setText(LocaleController.formatString("CallEmojiKeyTooltip", R.string.CallEmojiKeyTooltip, callingUser.first_name));
+        emojiRationalTextView.setText(LocaleController.formatString("CallEmojiKeyTooltip", R.string.CallEmojiKeyTooltip, UserObject.getFirstName(callingUser)));
         emojiRationalTextView.setTextSize(16);
         emojiRationalTextView.setTextColor(Color.WHITE);
         emojiRationalTextView.setGravity(Gravity.CENTER);
 
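The tooltip change swaps the raw callingUser.first_name for UserObject.getFirstName(callingUser), which avoids formatting a missing first name into the string. A rough stand-in for what such a helper does; treat this as a sketch, since the real UserObject also special-cases deleted accounts:

    public class FirstNameFallback {
        static class User {
            String first_name;
            String last_name;
        }

        // Simplified stand-in for UserObject.getFirstName (illustrative only).
        static String getFirstName(User user) {
            if (user == null) {
                return "";
            }
            if (user.first_name != null && user.first_name.length() > 0) {
                return user.first_name;
            }
            if (user.last_name != null && user.last_name.length() > 0) {
                return user.last_name;
            }
            return "";
        }

        public static void main(String[] args) {
            User u = new User();
            u.last_name = "Smith";
            System.out.println(getFirstName(u)); // "Smith" instead of the literal "null"
        }
    }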
@@ -11,6 +11,7 @@
 package org.webrtc;
 
 import android.content.Context;
+import android.hardware.Camera;
 import android.os.Handler;
 import android.os.SystemClock;
 import java.io.IOException;
@@ -45,6 +46,8 @@ class Camera1Session implements CameraSession {
   // Used only for stats. Only used on the camera thread.
   private final long constructionTimeNs; // Construction time of this class.
 
+  private OrientationHelper orientationHelper;
+
   private SessionState state;
   private boolean firstFrameReported;
 
@@ -170,6 +173,7 @@ class Camera1Session implements CameraSession {
     this.info = info;
     this.captureFormat = captureFormat;
     this.constructionTimeNs = constructionTimeNs;
+    this.orientationHelper = new OrientationHelper();
 
     surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
 
@@ -218,6 +222,7 @@ class Camera1Session implements CameraSession {
     } else {
       listenForBytebufferFrames();
     }
+    orientationHelper.start();
     try {
       camera.startPreview();
     } catch (RuntimeException e) {
@@ -242,6 +247,9 @@ class Camera1Session implements CameraSession {
     camera.stopPreview();
     camera.release();
     events.onCameraClosed(this);
+    if (orientationHelper != null) {
+      orientationHelper.stop();
+    }
     Logging.d(TAG, "Stop done");
   }
 
@@ -313,10 +321,11 @@ class Camera1Session implements CameraSession {
   }
 
   private int getFrameOrientation() {
-    int rotation = CameraSession.getDeviceOrientation(applicationContext);
-    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
+    int rotation = orientationHelper.getOrientation();
+    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
       rotation = 360 - rotation;
     }
+    OrientationHelper.cameraRotation = rotation;
     return (info.orientation + rotation) % 360;
   }
 
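Both camera sessions now read the physical device orientation from the new OrientationHelper instead of the display rotation, and the mirrored branch flips from the back to the front camera, presumably because the two sources report rotation in opposite senses. The shared arithmetic, extracted into a runnable sketch (method names are illustrative):

    public class FrameOrientation {
        // sensorOrientation: fixed mounting angle of the sensor (0/90/180/270).
        // deviceRotation: physical rotation as reported by OrientationHelper.
        static int frameOrientation(int sensorOrientation, int deviceRotation, boolean frontFacing) {
            int rotation = deviceRotation % 360;
            if (frontFacing) {
                rotation = 360 - rotation; // the front sensor is mirrored
            }
            return (sensorOrientation + rotation) % 360;
        }

        public static void main(String[] args) {
            System.out.println(frameOrientation(90, 0, false));  // 90
            System.out.println(frameOrientation(90, 270, true)); // 180
        }
    }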
@@ -24,9 +24,11 @@ import android.os.Handler;
 import androidx.annotation.Nullable;
 import android.util.Range;
 import android.view.Surface;
+
 import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
+
 import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
 
 @TargetApi(21)
@@ -53,6 +55,8 @@ class Camera2Session implements CameraSession {
   private final int height;
   private final int framerate;
 
+  private OrientationHelper orientationHelper;
+
   // Initialized at start
   private CameraCharacteristics cameraCharacteristics;
   private int cameraOrientation;
@@ -292,6 +296,7 @@ class Camera2Session implements CameraSession {
     this.width = width;
     this.height = height;
     this.framerate = framerate;
+    this.orientationHelper = new OrientationHelper();
 
     start();
   }
@@ -306,6 +311,7 @@ class Camera2Session implements CameraSession {
       reportError("getCameraCharacteristics(): " + e.getMessage());
       return;
     }
+    orientationHelper.start();
     cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
     isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
         == CameraMetadata.LENS_FACING_FRONT;
@@ -386,6 +392,9 @@ class Camera2Session implements CameraSession {
       cameraDevice.close();
       cameraDevice = null;
     }
+    if (orientationHelper != null) {
+      orientationHelper.stop();
+    }
 
     Logging.d(TAG, "Stop done");
   }
@@ -405,10 +414,11 @@ class Camera2Session implements CameraSession {
   }
 
   private int getFrameOrientation() {
-    int rotation = CameraSession.getDeviceOrientation(applicationContext);
-    if (!isCameraFrontFacing) {
+    int rotation = orientationHelper.getOrientation();
+    if (isCameraFrontFacing) {
       rotation = 360 - rotation;
     }
+    OrientationHelper.cameraRotation = rotation;
     return (cameraOrientation + rotation) % 360;
  }
 
@@ -10,10 +10,7 @@
 
 package org.webrtc;
 
-import android.content.Context;
 import android.graphics.Matrix;
-import android.view.WindowManager;
-import android.view.Surface;
 
 interface CameraSession {
   enum FailureType { ERROR, DISCONNECTED }
@@ -39,21 +36,6 @@ interface CameraSession {
    */
   void stop();
 
-  static int getDeviceOrientation(Context context) {
-    final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
-    switch (wm.getDefaultDisplay().getRotation()) {
-      case Surface.ROTATION_90:
-        return 90;
-      case Surface.ROTATION_180:
-        return 180;
-      case Surface.ROTATION_270:
-        return 270;
-      case Surface.ROTATION_0:
-      default:
-        return 0;
-    }
-  }
-
   static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
       TextureBufferImpl buffer, boolean mirror, int rotation) {
     final Matrix transformMatrix = new Matrix();
@@ -150,6 +150,8 @@ public class EglRenderer implements VideoSink {
   // If true, mirrors the video stream vertically.
   private boolean mirrorVertically;
 
+  private int rotation;
+
   // These variables are synchronized on |statisticsLock|.
   private final Object statisticsLock = new Object();
   // Total number of video frames received in renderFrame() call.
@@ -532,6 +534,12 @@ public class EglRenderer implements VideoSink {
     }
   }
 
+  public void setRotation(int value) {
+    synchronized (layoutLock) {
+      rotation = value;
+    }
+  }
+
   /**
    * Release EGL surface. This function will block until the EGL surface is released.
    */
@@ -637,7 +645,8 @@ public class EglRenderer implements VideoSink {
 
     final long startTimeNs = System.nanoTime();
 
-    final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
+    boolean rotate = Math.abs(rotation) == 90 || Math.abs(rotation) == 270;
+    final float frameAspectRatio = (rotate ? frame.getRotatedHeight() : frame.getRotatedWidth()) / (float) (rotate ? frame.getRotatedWidth() : frame.getRotatedHeight());
     final float drawnAspectRatio;
     synchronized (layoutLock) {
       drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
@@ -656,6 +665,7 @@ public class EglRenderer implements VideoSink {
 
     drawMatrix.reset();
     drawMatrix.preTranslate(0.5f, 0.5f);
+    drawMatrix.preRotate(rotation);
     drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
     drawMatrix.preScale(scaleX, scaleY);
     drawMatrix.preTranslate(-0.5f, -0.5f);
@@ -665,7 +675,7 @@ public class EglRenderer implements VideoSink {
     GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
     frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
-        eglBase.surfaceWidth(), eglBase.surfaceHeight());
+        eglBase.surfaceWidth(), eglBase.surfaceHeight(), rotate);
 
     final long swapBuffersStartTimeNs = System.nanoTime();
     if (usePresentationTimeStamp) {
@@ -715,6 +725,7 @@ public class EglRenderer implements VideoSink {
 
     drawMatrix.reset();
     drawMatrix.preTranslate(0.5f, 0.5f);
+    drawMatrix.preRotate(rotation);
     drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
     drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
     drawMatrix.preTranslate(-0.5f, -0.5f);
@@ -744,7 +755,7 @@ public class EglRenderer implements VideoSink {
     GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
     frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
-        0 /* viewportY */, scaledWidth, scaledHeight);
+        0 /* viewportY */, scaledWidth, scaledHeight, false);
 
     final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
     GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
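The new preRotate(rotation) call sits between preTranslate(0.5f, 0.5f) and preTranslate(-0.5f, -0.5f), so the extra rotation is applied about the texture centre in normalized coordinates rather than its corner. The same composition, demonstrated with java.awt.geom.AffineTransform so it runs off-device (AWT concatenates in the same order as Android's preConcat-style calls):

    import java.awt.geom.AffineTransform;
    import java.awt.geom.Point2D;

    public class CenterRotate {
        public static void main(String[] args) {
            AffineTransform m = new AffineTransform();
            m.translate(0.5, 0.5);        // move centre to the origin...
            m.rotate(Math.toRadians(90)); // ...rotate there (the new step)
            m.scale(-1, 1);               // stands in for the mirror preScale
            m.translate(-0.5, -0.5);      // ...and move back
            System.out.println(m.transform(new Point2D.Double(0.5, 0.5), null)); // centre stays (0.5, 0.5)
            System.out.println(m.transform(new Point2D.Double(0.0, 0.0), null)); // corner maps to ~(1, 1)
        }
    }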
@@ -35,43 +35,66 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
   private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000;
   private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000;
 
-  // List of devices with poor H.264 encoder quality.
-  // HW H.264 encoder on below devices has poor bitrate control - actual
-  // bitrates deviates a lot from the target value.
-  private static final List<String> H264_HW_EXCEPTION_MODELS =
-      Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4", "Pixel 3 XL", "Pixel 3");
-
-  private static final List<String> VP8_HW_EXCEPTION_MODELS =
-      Arrays.asList("Pixel 3 XL", "Pixel 3");
-
   @Nullable private final EglBase14.Context sharedContext;
   private final boolean enableIntelVp8Encoder;
   private final boolean enableH264HighProfile;
   @Nullable private final Predicate<MediaCodecInfo> codecAllowedPredicate;
 
+  private static final List<String> H264_HW_EXCEPTION_MODELS =
+      Arrays.asList("samsung-sgh-i337", "nexus7", "nexus4", "pixel3xl", "pixel3");
+
+  private static final List<String> VP8_HW_EXCEPTION_MODELS =
+      Arrays.asList("pixel3xl", "pixel3");
+
   private static Set<String> HW_EXCEPTION_MODELS = new HashSet<String>() {{
-    add("SM-A310F");
-    add("SM-A310F/DS");
-    add("SM-A310Y");
-    add("SM-A310M");
-    add("SM-G920F");
-    add("SM-G920FD");
-    add("SM-G920FQ");
-    add("SM-G920I");
-    add("SM-G920A");
-    add("SM-G920T");
-    add("SM-G930F");
-    add("SM-G930FD");
-    add("SM-G930W8");
-    add("SM-G930S");
-    add("SM-G930K");
-    add("SM-G930L");
-    add("SM-G935F");
-    add("SM-G935FD");
-    add("SM-G935W8");
-    add("SM-G935S");
-    add("SM-G935K");
-    add("SM-G935L");
+    add("sm-a310f");
+    add("sm-a310f/ds");
+    add("sm-a310y");
+    add("sm-a310m");
+    add("sm-g920f");
+    add("sm-g920fd");
+    add("sm-g920fq");
+    add("sm-g920i");
+    add("sm-g920a");
+    add("sm-g920t");
+    add("sm-g930f");
+    add("sm-g930fd");
+    add("sm-g930w8");
+    add("sm-g930s");
+    add("sm-g930k");
+    add("sm-g930l");
+    add("sm-g935f");
+    add("sm-g935fd");
+    add("sm-g935w8");
+    add("sm-g935s");
+    add("sm-g935k");
+    add("sm-g935l");
 
+    add("i537");
+    add("sgh-i537");
+    add("gt-i9295");
+    add("sgh-i337");
+    add("gt-i9505g");
+    add("gt-i9505");
+    add("gt-i9515");
+    add("f240");
+    add("e980");
+    add("ls980");
+    add("e988");
+    add("e986");
+    add("f240l");
+    add("f240s");
+    add("v9815");
+    add("nx403a");
+    add("f310l");
+    add("f310lr");
+    add("onem7");
+    add("onemax");
+    add("pn071");
+    add("htc6500lvw");
+    add("butterflys");
+    add("mi2s");
+    add("n1");
   }};
 
   /**
@@ -226,8 +249,13 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
 
   // Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the
   // current SDK.
+
+  private static String getModel() {
+    return Build.MODEL != null ? Build.MODEL.toLowerCase().replace(" ", "") : "nomodel";
+  }
+
   private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type) {
-    if (HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
+    if (HW_EXCEPTION_MODELS.contains(getModel())) {
       return false;
     }
     switch (type) {
@@ -244,7 +272,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
   }
 
   private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
-    if (VP8_HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
+    if (VP8_HW_EXCEPTION_MODELS.contains(getModel())) {
       return false;
     }
     String name = info.getName();
@@ -268,7 +296,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
 
   private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) {
     // First, H264 hardware might perform poorly on this model.
-    if (H264_HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
+    if (H264_HW_EXCEPTION_MODELS.contains(getModel())) {
       return false;
     }
     String name = info.getName();
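The blocklist entries are now stored lowercased with spaces stripped, and getModel() normalizes Build.MODEL the same way before lookup, so "Pixel 3 XL" and "pixel3xl" compare equal regardless of how the OEM spells the model string. A runnable sketch of the comparison, with Build.MODEL replaced by a parameter to stay off-device:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class ModelBlocklist {
        private static final Set<String> HW_EXCEPTION_MODELS =
                new HashSet<>(Arrays.asList("pixel3xl", "pixel3", "sm-g920f"));

        // Same normalization idea as the new getModel() in the patch.
        static String normalize(String buildModel) {
            return buildModel != null ? buildModel.toLowerCase().replace(" ", "") : "nomodel";
        }

        public static void main(String[] args) {
            System.out.println(HW_EXCEPTION_MODELS.contains(normalize("Pixel 3 XL"))); // true
            System.out.println(HW_EXCEPTION_MODELS.contains(normalize(null)));         // false
        }
    }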
@@ -0,0 +1,68 @@
+package org.webrtc;
+
+import android.view.OrientationEventListener;
+
+import org.telegram.messenger.ApplicationLoader;
+
+public class OrientationHelper {
+
+    private static final int ORIENTATION_HYSTERESIS = 5;
+    private OrientationEventListener orientationEventListener;
+    private int rotation;
+
+    public static volatile int cameraRotation;
+
+    private int roundOrientation(int orientation, int orientationHistory) {
+        boolean changeOrientation;
+        if (orientationHistory == OrientationEventListener.ORIENTATION_UNKNOWN) {
+            changeOrientation = true;
+        } else {
+            int dist = Math.abs(orientation - orientationHistory);
+            dist = Math.min(dist, 360 - dist);
+            changeOrientation = (dist >= 45 + ORIENTATION_HYSTERESIS);
+        }
+        if (changeOrientation) {
+            return ((orientation + 45) / 90 * 90) % 360;
+        }
+        return orientationHistory;
+    }
+
+    public OrientationHelper() {
+        orientationEventListener = new OrientationEventListener(ApplicationLoader.applicationContext) {
+            @Override
+            public void onOrientationChanged(int orientation) {
+                if (orientationEventListener == null || orientation == ORIENTATION_UNKNOWN) {
+                    return;
+                }
+                int newOrietation = roundOrientation(orientation, rotation);
+                if (newOrietation != rotation) {
+                    onOrientationUpdate(rotation = newOrietation);
+                }
+            }
+        };
+    }
+
+    protected void onOrientationUpdate(int orientation) {
+
+    }
+
+    public void start() {
+        if (orientationEventListener.canDetectOrientation()) {
+            orientationEventListener.enable();
+        } else {
+            orientationEventListener.disable();
+            orientationEventListener = null;
+        }
+    }
+
+    public void stop() {
+        if (orientationEventListener != null) {
+            orientationEventListener.disable();
+            orientationEventListener = null;
+        }
+    }
+
+    public int getOrientation() {
+        return rotation;
+    }
+}
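roundOrientation() in the new class snaps raw sensor readings to the nearest multiple of 90 degrees, but only once the reading has moved at least 45 + ORIENTATION_HYSTERESIS = 50 degrees away from the previous snapped value, so jitter near a boundary cannot make the orientation toggle back and forth. A trimmed, runnable version of the same math (the ORIENTATION_UNKNOWN branch is omitted):

    public class HysteresisDemo {
        static final int ORIENTATION_HYSTERESIS = 5;

        static int round(int orientation, int history) {
            int dist = Math.abs(orientation - history);
            dist = Math.min(dist, 360 - dist);
            if (dist >= 45 + ORIENTATION_HYSTERESIS) {
                return ((orientation + 45) / 90 * 90) % 360;
            }
            return history;
        }

        public static void main(String[] args) {
            System.out.println(round(48, 0));   // 0  : inside the dead zone, keeps 0
            System.out.println(round(50, 0));   // 90 : moved 50 degrees, snaps to 90
            System.out.println(round(170, 90)); // 180: moved far enough from 90
        }
    }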
@@ -88,7 +88,7 @@ public class RendererCommon {
       this.visibleFractionMismatchOrientation = visibleFractionMismatchOrientation;
     }
 
-    public Point measure(int widthSpec, int heightSpec, int frameWidth, int frameHeight) {
+    public Point measure(boolean isCamera, int widthSpec, int heightSpec, int frameWidth, int frameHeight) {
       // Calculate max allowed layout size.
       final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec);
       final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec);
@@ -108,7 +108,7 @@ public class RendererCommon {
       if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) {
         layoutSize.x = maxWidth;
       }
-      if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY) {
+      if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY || !isCamera && (frameAspect > 1.0f) == (displayAspect > 1.0f)) {
         layoutSize.y = maxHeight;
       }
       return layoutSize;
 
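The extended measure() condition takes the full height for non-camera video whenever the frame and the display agree on orientation (both landscape or both portrait), not only under an EXACTLY measure spec. The boolean logic in isolation (names are illustrative):

    public class AspectAgreement {
        static boolean fillHeight(boolean exactly, boolean isCamera,
                                  float frameAspect, float displayAspect) {
            return exactly || !isCamera && (frameAspect > 1.0f) == (displayAspect > 1.0f);
        }

        public static void main(String[] args) {
            System.out.println(fillHeight(false, false, 16 / 9f, 4 / 3f)); // true: both landscape
            System.out.println(fillHeight(false, false, 9 / 16f, 4 / 3f)); // false: orientations differ
            System.out.println(fillHeight(false, true, 16 / 9f, 4 / 3f));  // false: camera path unchanged
        }
    }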
@@ -190,7 +190,7 @@ public class SurfaceViewRenderer extends SurfaceView
   protected void onMeasure(int widthSpec, int heightSpec) {
     ThreadUtils.checkIsOnMainThread();
     Point size =
-        videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
+        videoLayoutMeasure.measure(true, widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
     setMeasuredDimension(size.x, size.y);
     logD("onMeasure(). New size: " + size.x + "x" + size.y);
   }
 
@@ -6,6 +6,7 @@ import android.graphics.Point;
 import android.graphics.SurfaceTexture;
 import android.os.Looper;
 import android.view.TextureView;
+import android.view.View;
 
 import org.telegram.messenger.AndroidUtilities;
 
@@ -31,6 +32,9 @@ public class TextureViewRenderer extends TextureView
   private boolean enableFixedSize;
   private int surfaceWidth;
   private int surfaceHeight;
+  private boolean isCamera;
+
+  private OrientationHelper orientationHelper;
 
   public static class TextureEglRenderer extends EglRenderer implements TextureView.SurfaceTextureListener {
     private static final String TAG = "TextureEglRenderer";
@@ -218,6 +222,9 @@ public class TextureViewRenderer extends TextureView
    */
   public void release() {
     eglRenderer.release();
+    if (orientationHelper != null) {
+      orientationHelper.stop();
+    }
   }
 
   /**
@@ -251,6 +258,19 @@ public class TextureViewRenderer extends TextureView
     eglRenderer.removeFrameListener(listener);
   }
 
+  public void setIsCamera(boolean value) {
+    isCamera = value;
+    if (!isCamera) {
+      orientationHelper = new OrientationHelper() {
+        @Override
+        protected void onOrientationUpdate(int orientation) {
+          updateRotation();
+        }
+      };
+      orientationHelper.start();
+    }
+  }
+
   /**
    * Enables fixed size for the surface. This provides better performance but might be buggy on some
    * devices. By default this is turned off.
@@ -261,6 +281,45 @@ public class TextureViewRenderer extends TextureView
     updateSurfaceSize();
   }
 
+  private void updateRotation() {
+    if (orientationHelper == null || rotatedFrameWidth == 0 || rotatedFrameHeight == 0) {
+      return;
+    }
+    View parentView = (View) getParent();
+    if (parentView == null) {
+      return;
+    }
+    int orientation = orientationHelper.getOrientation();
+    float viewWidth = getMeasuredWidth();
+    float viewHeight = getMeasuredHeight();
+    float w;
+    float h;
+    float targetWidth = parentView.getMeasuredWidth();
+    float targetHeight = parentView.getMeasuredHeight();
+    if (orientation == 90 || orientation == 270) {
+      w = viewHeight;
+      h = viewWidth;
+    } else {
+      w = viewWidth;
+      h = viewHeight;
+    }
+    float scale;
+    if (w < h) {
+      scale = Math.max(w / viewWidth, h / viewHeight);
+    } else {
+      scale = Math.min(w / viewWidth, h / viewHeight);
+    }
+    w *= scale;
+    h *= scale;
+    if (Math.abs(w / h - targetWidth / targetHeight) < 0.1f) {
+      scale *= Math.max(targetWidth / w, targetHeight / h);
+    }
+    if (orientation == 270) {
+      orientation = -90;
+    }
+    animate().scaleX(scale).scaleY(scale).rotation(-orientation).setDuration(180).start();
+  }
+
   /**
    * Set if the video stream should be mirrored or not.
    */
@@ -312,8 +371,11 @@ public class TextureViewRenderer extends TextureView
   @Override
   protected void onMeasure(int widthSpec, int heightSpec) {
     ThreadUtils.checkIsOnMainThread();
-    Point size = videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
+    Point size = videoLayoutMeasure.measure(isCamera, widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
     setMeasuredDimension(size.x, size.y);
+    if (!isCamera) {
+      updateRotation();
+    }
     logD("onMeasure(). New size: " + size.x + "x" + size.y);
   }
 
@@ -337,7 +399,7 @@ public class TextureViewRenderer extends TextureView
       drawnFrameHeight = rotatedFrameHeight;
     } else {
       drawnFrameWidth = rotatedFrameWidth;
-      drawnFrameHeight = (int) (rotatedFrameWidth / layoutAspectRatio);
+      drawnFrameHeight = (int) (rotatedFrameHeight / layoutAspectRatio);
     }
     // Aspect ratio of the drawn frame and the view is the same.
     final int width = Math.min(getWidth(), drawnFrameWidth);
@@ -413,6 +475,9 @@ public class TextureViewRenderer extends TextureView
     if (rendererEvents != null) {
       rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
     }
+    if (isCamera) {
+      eglRenderer.setRotation(-OrientationHelper.cameraRotation);
+    }
     int rotatedWidth = rotation == 0 || rotation == 180 ? videoWidth : videoHeight;
     int rotatedHeight = rotation == 0 || rotation == 180 ? videoHeight : videoWidth;
     // run immediately if possible for ui thread tests
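updateRotation() counter-rotates the whole TextureView and then rescales it so the swapped width/height still fit (or fill) the parent; the 270-to--90 conversion simply makes the animation take the short way round. The scale computation on its own, as a runnable sketch:

    public class RotationScale {
        static float scaleFor(int orientation, float viewW, float viewH,
                              float targetW, float targetH) {
            boolean swap = orientation == 90 || orientation == 270;
            float w = swap ? viewH : viewW;
            float h = swap ? viewW : viewH;
            float scale = (w < h) ? Math.max(w / viewW, h / viewH)
                                  : Math.min(w / viewW, h / viewH);
            w *= scale;
            h *= scale;
            if (Math.abs(w / h - targetW / targetH) < 0.1f) {
                scale *= Math.max(targetW / w, targetH / h);
            }
            return scale;
        }

        public static void main(String[] args) {
            // A 1080x1920 view rotated 90 degrees inside a 1080x1920 parent
            // shrinks so its former height fits the parent's width.
            System.out.println(scaleFor(90, 1080, 1920, 1080, 1920)); // 0.5625
        }
    }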
@@ -182,14 +182,14 @@ public class VideoFrameDrawer {
   public void drawFrame(
       VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) {
     drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */,
-        frame.getRotatedWidth(), frame.getRotatedHeight());
+        frame.getRotatedWidth(), frame.getRotatedHeight(), false);
   }
 
   public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
       @Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
-      int viewportHeight) {
-    final int width = frame.getRotatedWidth();
-    final int height = frame.getRotatedHeight();
+      int viewportHeight, boolean rotate) {
+    final int width = rotate ? frame.getRotatedHeight() : frame.getRotatedWidth();
+    final int height = rotate ? frame.getRotatedWidth() : frame.getRotatedHeight();
     calculateTransformedRenderSize(width, height, additionalRenderMatrix);
     if (renderWidth <= 0 || renderHeight <= 0) {
       Logging.w(TAG, "Illegal frame size: " + renderWidth + "x" + renderHeight);
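The rotate flag threaded from EglRenderer into drawFrame() swaps the effective width and height, so a frame about to be counter-rotated by 90 or 270 degrees is laid out with the correct aspect ratio. The swap in isolation:

    public class RotatedAspect {
        static float frameAspect(int rotatedWidth, int rotatedHeight, int extraRotation) {
            // Mirrors the check in EglRenderer: 90/270 means width and height trade places.
            boolean rotate = Math.abs(extraRotation) == 90 || Math.abs(extraRotation) == 270;
            float w = rotate ? rotatedHeight : rotatedWidth;
            float h = rotate ? rotatedWidth : rotatedHeight;
            return w / h;
        }

        public static void main(String[] args) {
            System.out.println(frameAspect(1280, 720, 0));   // 1.7777778
            System.out.println(frameAspect(1280, 720, -90)); // 0.5625
        }
    }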