Update to 7.0.0 (2064)
Commit 4c5f32babf (parent 6e495f54b8)
67 changed files with 2447 additions and 1283 deletions
@@ -280,7 +280,7 @@ android {
         }
     }

-    defaultConfig.versionCode = 2061
+    defaultConfig.versionCode = 2064

     applicationVariants.all { variant ->
         variant.outputs.all { output ->
@@ -71,6 +71,10 @@ struct InstanceHolder {
     std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
 };

+jclass TrafficStatsClass;
+jclass FinalStateClass;
+jmethodID FinalStateInitMethod;
+
 jlong getInstanceHolderId(JNIEnv *env, jobject obj) {
     return env->GetLongField(obj, env->GetFieldID(env->GetObjectClass(obj), "nativePtr", "J"));
 }
@@ -200,9 +204,8 @@ jint asJavaState(const State &state) {
 }

 jobject asJavaTrafficStats(JNIEnv *env, const TrafficStats &trafficStats) {
-    jclass clazz = env->FindClass("org/telegram/messenger/voip/Instance$TrafficStats");
-    jmethodID initMethodId = env->GetMethodID(clazz, "<init>", "(JJJJ)V");
-    return env->NewObject(clazz, initMethodId, (jlong) trafficStats.bytesSentWifi, (jlong) trafficStats.bytesReceivedWifi, (jlong) trafficStats.bytesSentMobile, (jlong) trafficStats.bytesReceivedMobile);
+    jmethodID initMethodId = env->GetMethodID(TrafficStatsClass, "<init>", "(JJJJ)V");
+    return env->NewObject(TrafficStatsClass, initMethodId, (jlong) trafficStats.bytesSentWifi, (jlong) trafficStats.bytesReceivedWifi, (jlong) trafficStats.bytesSentMobile, (jlong) trafficStats.bytesReceivedMobile);
 }

 jobject asJavaFinalState(JNIEnv *env, const FinalState &finalState) {

@@ -210,9 +213,7 @@ jobject asJavaFinalState(JNIEnv *env, const FinalState &finalState) {
     jstring debugLog = env->NewStringUTF(finalState.debugLog.c_str());
     jobject trafficStats = asJavaTrafficStats(env, finalState.trafficStats);
     auto isRatingSuggested = static_cast<jboolean>(finalState.isRatingSuggested);
-    jclass finalStateClass = env->FindClass("org/telegram/messenger/voip/Instance$FinalState");
-    jmethodID finalStateInitMethodId = env->GetMethodID(finalStateClass, "<init>", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V");
-    return env->NewObject(finalStateClass, finalStateInitMethodId, persistentState, debugLog, trafficStats, isRatingSuggested);
+    return env->NewObject(FinalStateClass, FinalStateInitMethod, persistentState, debugLog, trafficStats, isRatingSuggested);
 }

 extern "C" {
@@ -229,6 +230,10 @@ void initWebRTC(JNIEnv *env) {
     webrtc::JVM::Initialize(vm);
     rtc::InitializeSSL();
     webrtcLoaded = true;
+
+    TrafficStatsClass = static_cast<jclass>(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$TrafficStats")));
+    FinalStateClass = static_cast<jclass>(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$FinalState")));
+    FinalStateInitMethod = env->GetMethodID(FinalStateClass, "<init>", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V");
 }

 JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNativeInstance(JNIEnv *env, jclass clazz, jstring version, jobject instanceObj, jobject config, jstring persistentStateFilePath, jobjectArray endpoints, jobject proxyClass, jint networkType, jobject encryptionKey, jobject remoteSink, jlong videoCapturer, jfloat aspectRatio) {
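Note: the TrafficStats/FinalState JNI lookups are now resolved once in initWebRTC and kept in the globals declared above, instead of calling FindClass on every conversion. FindClass returns a local reference that dies when the native frame returns, so the cached jclass must be promoted with NewGlobalRef; jmethodIDs stay valid for as long as the class is loaded. A minimal sketch of the pattern (class name hypothetical):

static jclass gFooClass;   // global ref, survives across JNI calls
static jmethodID gFooCtor; // valid as long as the class stays loaded

void cacheJniRefs(JNIEnv *env) {
    jclass local = env->FindClass("org/example/Foo");           // local ref only
    gFooClass = static_cast<jclass>(env->NewGlobalRef(local));  // promote before caching
    gFooCtor = env->GetMethodID(gFooClass, "<init>", "()V");
}

jobject makeFoo(JNIEnv *env) {
    return env->NewObject(gFooClass, gFooCtor); // no per-call class lookup
}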
@@ -259,7 +264,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
         .enableVolumeControl = true,
         .logPath = tgvoip::jni::JavaStringToStdString(env, configObject.getStringField("logPath")),
         .maxApiLayer = configObject.getIntField("maxApiLayer"),
-        /*.preferredAspectRatio = aspectRatio*/
+        .preferredAspectRatio = aspectRatio
     },
     .encryptionKey = EncryptionKey(
         std::move(encryptionKeyValue),
@@ -332,6 +337,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
     holder->javaInstance = globalRef;
+    holder->_videoCapture = videoCapture;
     holder->nativeInstance->setIncomingVideoOutput(webrtc::JavaToNativeVideoSink(env, remoteSink));
     holder->nativeInstance->setNetworkType(parseNetworkType(networkType));
     return reinterpret_cast<jlong>(holder);
 }

@@ -384,19 +390,16 @@ JNIEXPORT jbyteArray JNICALL Java_org_telegram_messenger_voip_NativeInstance_get
     return copyVectorToJavaByteArray(env, getInstance(env, obj)->getPersistentState().value);
 }

-JNIEXPORT jobject JNICALL Java_org_telegram_messenger_voip_NativeInstance_stop(JNIEnv *env, jobject obj) {
+JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_stopNative(JNIEnv *env, jobject obj) {
     InstanceHolder *instance = getInstanceHolder(env, obj);
-    FinalState finalState = instance->nativeInstance->stop();
-
-    // saving persistent state
-    const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, obj).getStringField("persistentStateFilePath"));
-    savePersistentState(path.c_str(), finalState.persistentState);
-
-    // clean
-    env->DeleteGlobalRef(instance->javaInstance);
-    delete instance;
-
-    return asJavaFinalState(env, finalState);
+    instance->nativeInstance->stop([instance](FinalState finalState) {
+        JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
+        const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, instance->javaInstance).getStringField("persistentStateFilePath"));
+        savePersistentState(path.c_str(), finalState.persistentState);
+        env->CallVoidMethod(instance->javaInstance, env->GetMethodID(env->GetObjectClass(instance->javaInstance), "onStop", "(Lorg/telegram/messenger/voip/Instance$FinalState;)V"), asJavaFinalState(env, finalState));
+        env->DeleteGlobalRef(instance->javaInstance);
+        delete instance;
+    });
 }

 JNIEXPORT long JNICALL Java_org_telegram_messenger_voip_NativeInstance_createVideoCapturer(JNIEnv *env, jclass clazz, jobject localSink) {
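Note: stopping is now asynchronous: the old stop() entry point returned a FinalState synchronously, while stopNative passes a completion lambda into the native instance and delivers the result through the Java object's onStop callback. The completion can fire on a native worker thread the JVM has never seen, so it must fetch a JNIEnv with webrtc::AttachCurrentThreadIfNeeded() before touching Java state. The callback shape, reduced to a sketch (names and the no-arg signature are hypothetical):

// Invoked from a native worker thread once shutdown finishes.
void onNativeStopped(jobject globalRef /* made with NewGlobalRef earlier */) {
    JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); // attach-or-get for this thread
    jclass cls = env->GetObjectClass(globalRef);
    jmethodID mid = env->GetMethodID(cls, "onStop", "()V"); // assumed no-arg variant
    env->CallVoidMethod(globalRef, mid);
    env->DeleteGlobalRef(globalRef); // release once the last callback has run
}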
@@ -23,10 +23,10 @@ bool CompareFormats(const VideoFormat &a, const VideoFormat &b) {
     }
 }

-int FormatPriority(const VideoFormat &format) {
+int FormatPriority(const VideoFormat &format, const std::vector<std::string> &preferredCodecs) {
     static const auto kCodecs = {
         std::string(cricket::kAv1CodecName),
         std::string(cricket::kVp9CodecName),
         std::string(cricket::kH265CodecName),
         std::string(cricket::kH264CodecName),
         std::string(cricket::kVp8CodecName),
@@ -43,8 +43,16 @@ int FormatPriority(const VideoFormat &format) {
         }
         return result;
     }();

+    for (int i = 0; i < preferredCodecs.size(); i++) {
+        for (const auto &name : kSupported) {
+            if (absl::EqualsIgnoreCase(format.name, preferredCodecs[i]) && absl::EqualsIgnoreCase(format.name, name)) {
+                return i;
+            }
+        }
+    }
+
-    auto result = 0;
+    auto result = (int)preferredCodecs.size();
     for (const auto &name : kSupported) {
         if (absl::EqualsIgnoreCase(format.name, name)) {
             return result;
@@ -54,17 +62,19 @@ int FormatPriority(const VideoFormat &format) {
     return -1;
 }

-bool ComparePriorities(const VideoFormat &a, const VideoFormat &b) {
-    return FormatPriority(a) < FormatPriority(b);
+bool ComparePriorities(const VideoFormat &a, const VideoFormat &b, const std::vector<std::string> &preferredCodecs) {
+    return FormatPriority(a, preferredCodecs) < FormatPriority(b, preferredCodecs);
 }

-std::vector<VideoFormat> FilterAndSortEncoders(std::vector<VideoFormat> list) {
+std::vector<VideoFormat> FilterAndSortEncoders(std::vector<VideoFormat> list, const std::vector<std::string> &preferredCodecs) {
     const auto listBegin = begin(list);
     const auto listEnd = end(list);
-    std::sort(listBegin, listEnd, ComparePriorities);
+    std::sort(listBegin, listEnd, [&preferredCodecs](const VideoFormat &lhs, const VideoFormat &rhs) {
+        return ComparePriorities(lhs, rhs, preferredCodecs);
+    });
     auto eraseFrom = listBegin;
     auto eraseTill = eraseFrom;
-    while (eraseTill != listEnd && FormatPriority(*eraseTill) == -1) {
+    while (eraseTill != listEnd && FormatPriority(*eraseTill, preferredCodecs) == -1) {
         ++eraseTill;
     }
     if (eraseTill != eraseFrom) {
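Note: FormatPriority now ranks any codec named in preferredCodecs by its position in that list (0 is best), while the remaining supported codecs start at preferredCodecs.size() in their previous hardware-preference order; unsupported formats still get -1 and are erased after the sort. A worked example under assumed inputs:

// encoders: std::vector<VideoFormat> from the encoder factory (assumed available).
// Assuming preferredCodecs = {"H264"} and the kSupported order derived above:
std::vector<std::string> preferred = {"H264"};
// FormatPriority(H264 format, preferred) -> 0   (index in preferredCodecs)
// FormatPriority(VP8 format,  preferred) -> 1+  (starts at preferred.size())
// FormatPriority(unknown fmt, preferred) -> -1  (filtered out)
auto ordered = FilterAndSortEncoders(std::move(encoders), preferred); // H264 sorts first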
@@ -131,11 +141,12 @@ void AddDefaultFeedbackParams(cricket::VideoCodec *codec) {

 VideoFormatsMessage ComposeSupportedFormats(
     std::vector<VideoFormat> encoders,
-    std::vector<VideoFormat> decoders) {
-    encoders = FilterAndSortEncoders(std::move(encoders));
+    std::vector<VideoFormat> decoders,
+    const std::vector<std::string> &preferredCodecs) {
+    encoders = FilterAndSortEncoders(std::move(encoders), preferredCodecs);

     auto result = VideoFormatsMessage();
-    result.encodersCount = encoders.size();
+    result.encodersCount = (int)encoders.size();
     result.formats = AppendUnique(std::move(encoders), std::move(decoders));
     for (const auto &format : result.formats) {
         RTC_LOG(LS_INFO) << "Format: " << format.ToString();
@@ -18,7 +18,8 @@ struct CommonCodecs {

 VideoFormatsMessage ComposeSupportedFormats(
     std::vector<webrtc::SdpVideoFormat> encoders,
-    std::vector<webrtc::SdpVideoFormat> decoders);
+    std::vector<webrtc::SdpVideoFormat> decoders,
+    const std::vector<std::string> &preferredCodecs);

 CommonFormats ComputeCommonFormats(
     const VideoFormatsMessage &my,
@@ -1,5 +1,7 @@
 #include "CryptoHelper.h"

+#include <cstring>
+
 namespace tgcalls {

 AesKeyIv PrepareAesKeyIv(const uint8_t *key, const uint8_t *msgKey, int x) {
@@ -58,6 +58,16 @@ absl::nullopt_t LogError(
     return absl::nullopt;
 }

+bool ConstTimeIsDifferent(const void *a, const void *b, size_t size) {
+    auto ca = reinterpret_cast<const char*>(a);
+    auto cb = reinterpret_cast<const char*>(b);
+    volatile auto different = false;
+    for (const auto ce = ca + size; ca != ce; ++ca, ++cb) {
+        different |= (*ca != *cb);
+    }
+    return different;
+}
+
 } // namespace

 EncryptedConnection::EncryptedConnection(
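Note: this helper replaces the memcmp in the message-key check below. memcmp may return as soon as the first byte differs, so its running time reveals the length of the matching prefix, which an attacker can probe to forge an authenticator byte by byte; ConstTimeIsDifferent always walks all bytes and ORs the differences together, making the timing independent of where the mismatch sits. Usage, as in the hash check below:

// Timing-safe 16-byte msg_key comparison (early-exit memcmp would leak the
// position of the first mismatch through its running time):
if (ConstTimeIsDifferent(msgKeyLarge.data() + 8, msgKey, 16)) {
    // reject the packet
}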
@@ -326,7 +336,7 @@ auto EncryptedConnection::handleIncomingPacket(const char *bytes, size_t size)
     const auto msgKeyLarge = ConcatSHA256(
         MemorySpan{ key + 88 + x, 32 },
         MemorySpan{ decryptionBuffer.data(), decryptionBuffer.size() });
-    if (memcmp(msgKeyLarge.data() + 8, msgKey, 16)) {
+    if (ConstTimeIsDifferent(msgKeyLarge.data() + 8, msgKey, 16)) {
         return LogError("Bad incoming data hash.");
     }

@@ -364,10 +374,16 @@ auto EncryptedConnection::processPacket(
     }

     if (type == kEmptyId) {
+        if (additionalMessage) {
+            return LogError("Empty message should be only the first one in the packet.");
+        }
         RTC_LOG(LS_INFO) << logHeader()
             << "Got RECV:empty" << "#" << currentCounter;
         reader.Consume(1);
     } else if (type == kAckId) {
+        if (!additionalMessage) {
+            return LogError("Ack message must not be the first one in the packet.");
+        }
         ackMyMessage(currentSeq);
         reader.Consume(1);
     } else if (auto message = DeserializeMessage(reader, singleMessagePacket)) {
@@ -1,7 +1,5 @@
 #include "Instance.h"

-#include "VideoCaptureInterfaceImpl.h"
-
 #include <algorithm>
 #include <stdarg.h>

@@ -10,8 +8,8 @@ namespace {

 std::function<void(std::string const &)> globalLoggingFunction;

-std::map<std::string, std::unique_ptr<Meta>> &MetaMap() {
-    static auto result = std::map<std::string, std::unique_ptr<Meta>>();
+std::map<std::string, std::shared_ptr<Meta>> &MetaMap() {
+    static auto result = std::map<std::string, std::shared_ptr<Meta>>();
     return result;
 }

@@ -44,10 +42,12 @@ std::unique_ptr<Instance> Meta::Create(
         : nullptr;
 }

-void Meta::RegisterOne(std::unique_ptr<Meta> meta) {
+void Meta::RegisterOne(std::shared_ptr<Meta> meta) {
     if (meta) {
-        const auto version = meta->version();
-        MetaMap().emplace(version, std::move(meta));
+        const auto versions = meta->versions();
+        for (auto &it : versions) {
+            MetaMap().emplace(it, meta);
+        }
     }
 }

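Note: a Meta previously owned exactly one version string, so the registry could hold unique_ptr and move the object in. An implementation can now advertise several versions (InstanceImpl::GetVersions below returns both "2.7.7" and "3.0.0"), and the same Meta object is registered under each of those keys, which is why the map switches to shared_ptr. The shared registration, in brief:

// One implementation object, several lookup keys:
auto meta = std::make_shared<MetaImpl>();   // MetaImpl as defined in Instance.h
for (const auto &v : meta->versions()) {    // e.g. {"2.7.7", "3.0.0"}
    MetaMap().emplace(v, meta);             // shared, not moved: N keys -> 1 object
}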
@@ -55,6 +55,11 @@ struct Endpoint {
     unsigned char peerTag[16] = { 0 };
 };

+enum class ProtocolVersion {
+    V0,
+    V1 // Low-cost network negotiation
+};
+
 enum class NetworkType {
     Unknown,
     Gprs,
@@ -98,6 +103,8 @@ struct Config {
     int maxApiLayer = 0;
     float preferredAspectRatio;
     bool enableHighBitrateVideo = false;
+    std::vector<std::string> preferredVideoCodecs;
+    ProtocolVersion protocolVersion = ProtocolVersion::V0;
 };

 struct EncryptionKey {
@@ -174,7 +181,7 @@ public:
     virtual void receiveSignalingData(const std::vector<uint8_t> &data) = 0;
     virtual void setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture) = 0;

-    virtual FinalState stop() = 0;
+    virtual void stop(std::function<void(FinalState)> completion) = 0;

 };

@@ -204,7 +211,7 @@ public:

     virtual std::unique_ptr<Instance> construct(Descriptor &&descriptor) = 0;
     virtual int connectionMaxLayer() = 0;
-    virtual std::string version() = 0;
+    virtual std::vector<std::string> versions() = 0;

     static std::unique_ptr<Instance> Create(
         const std::string &version,
@@ -218,7 +225,7 @@ private:

     template <typename Implementation>
     static bool RegisterOne();
-    static void RegisterOne(std::unique_ptr<Meta> meta);
+    static void RegisterOne(std::shared_ptr<Meta> meta);

 };

@@ -229,14 +236,14 @@ bool Meta::RegisterOne() {
         int connectionMaxLayer() override {
             return Implementation::GetConnectionMaxLayer();
         }
-        std::string version() override {
-            return Implementation::GetVersion();
+        std::vector<std::string> versions() override {
+            return Implementation::GetVersions();
         }
         std::unique_ptr<Instance> construct(Descriptor &&descriptor) override {
             return std::make_unique<Implementation>(std::move(descriptor));
         }
     };
-    RegisterOne(std::make_unique<MetaImpl>());
+    RegisterOne(std::make_shared<MetaImpl>());
     return true;
 }

@@ -29,6 +29,8 @@ InstanceImpl::InstanceImpl(Descriptor &&descriptor)
     rtc::LogMessage::LogToDebug(rtc::LS_INFO);
     rtc::LogMessage::SetLogToStderr(false);
     rtc::LogMessage::AddLogToStream(_logSink.get(), rtc::LS_INFO);
+
+    auto networkType = descriptor.initialNetworkType;

     _manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [descriptor = std::move(descriptor)]() mutable {
         return new Manager(getManagerThread(), std::move(descriptor));
@@ -36,6 +38,8 @@ InstanceImpl::InstanceImpl(Descriptor &&descriptor)
     _manager->perform(RTC_FROM_HERE, [](Manager *manager) {
         manager->start();
     });
+
+    setNetworkType(networkType);
 }

 InstanceImpl::~InstanceImpl() {
@@ -55,51 +59,19 @@ void InstanceImpl::setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoC
 }

 void InstanceImpl::setNetworkType(NetworkType networkType) {
-    /*message::NetworkType mappedType;
-
-    switch (networkType) {
-        case NetworkType::Unknown:
-            mappedType = message::NetworkType::nUnknown;
-            break;
-        case NetworkType::Gprs:
-            mappedType = message::NetworkType::nGprs;
-            break;
-        case NetworkType::Edge:
-            mappedType = message::NetworkType::nEdge;
-            break;
-        case NetworkType::ThirdGeneration:
-            mappedType = message::NetworkType::n3gOrAbove;
-            break;
-        case NetworkType::Hspa:
-            mappedType = message::NetworkType::n3gOrAbove;
-            break;
-        case NetworkType::Lte:
-            mappedType = message::NetworkType::n3gOrAbove;
-            break;
-        case NetworkType::WiFi:
-            mappedType = message::NetworkType::nHighSpeed;
-            break;
-        case NetworkType::Ethernet:
-            mappedType = message::NetworkType::nHighSpeed;
-            break;
-        case NetworkType::OtherHighSpeed:
-            mappedType = message::NetworkType::nHighSpeed;
-            break;
-        case NetworkType::OtherLowSpeed:
-            mappedType = message::NetworkType::nEdge;
-            break;
-        case NetworkType::OtherMobile:
-            mappedType = message::NetworkType::n3gOrAbove;
-            break;
-        case NetworkType::Dialup:
-            mappedType = message::NetworkType::nGprs;
-            break;
-        default:
-            mappedType = message::NetworkType::nUnknown;
-            break;
-    }
-
-    controller_->SetNetworkType(mappedType);*/
+    bool isLowCostNetwork = false;
+    switch (networkType) {
+        case NetworkType::WiFi:
+        case NetworkType::Ethernet:
+            isLowCostNetwork = true;
+            break;
+        default:
+            break;
+    }
+
+    _manager->perform(RTC_FROM_HERE, [isLowCostNetwork](Manager *manager) {
+        manager->setIsLocalNetworkLowCost(isLowCostNetwork);
+    });
 }

 void InstanceImpl::setMuteMicrophone(bool muteMicrophone) {
@@ -166,12 +138,19 @@ PersistentState InstanceImpl::getPersistentState() {
     return PersistentState{}; // we dont't have such information
 }

-FinalState InstanceImpl::stop() {
-    FinalState finalState;
-    finalState.debugLog = _logSink->result();
-    finalState.isRatingSuggested = false;
-
-    return finalState;
+void InstanceImpl::stop(std::function<void(FinalState)> completion) {
+    std::string debugLog = _logSink->result();
+
+    _manager->perform(RTC_FROM_HERE, [completion, debugLog = std::move(debugLog)](Manager *manager) {
+        manager->getNetworkStats([completion, debugLog = std::move(debugLog)](TrafficStats stats) {
+            FinalState finalState;
+            finalState.debugLog = debugLog;
+            finalState.isRatingSuggested = false;
+            finalState.trafficStats = stats;
+
+            completion(finalState);
+        });
+    });
 }

 /*void InstanceImpl::controllerStateCallback(Controller::State state) {
@@ -201,8 +180,11 @@ int InstanceImpl::GetConnectionMaxLayer() {
     return 92; // TODO: retrieve from LayerBase
 }

-std::string InstanceImpl::GetVersion() {
-    return "2.7.7"; // TODO: version not known while not released
+std::vector<std::string> InstanceImpl::GetVersions() {
+    std::vector<std::string> result;
+    result.push_back("2.7.7");
+    result.push_back("3.0.0");
+    return result;
 }

 template <>
@@ -17,7 +17,7 @@ public:
     ~InstanceImpl() override;

     static int GetConnectionMaxLayer();
-    static std::string GetVersion();
+    static std::vector<std::string> GetVersions();

     void receiveSignalingData(const std::vector<uint8_t> &data) override;
     void setVideoCapture(std::shared_ptr<VideoCaptureInterface> videoCapture) override;

@@ -37,7 +37,7 @@ public:
     int64_t getPreferredRelayId() override;
     TrafficStats getTrafficStats() override;
     PersistentState getPersistentState() override;
-    FinalState stop() override;
+    void stop(std::function<void(FinalState)> completion) override;
     //void controllerStateCallback(Controller::State state);

 private:
@@ -5,9 +5,9 @@
 #ifdef WEBRTC_WIN
 #include "windows.h"
 #include <ctime>
-#elif defined(WEBRTC_IOS) || defined(WEBRTC_MAC)
+#else // WEBRTC_WIN
 #include <sys/time.h>
-#endif //WEBRTC_IOS || WEBRTC_MAC
+#endif // WEBRTC_WIN

 namespace tgcalls {

@@ -39,6 +39,7 @@ _signaling(
     _encryptionKey,
     [=](int delayMs, int cause) { sendSignalingAsync(delayMs, cause); }),
 _enableP2P(descriptor.config.enableP2P),
+_protocolVersion(descriptor.config.protocolVersion),
 _rtcServers(std::move(descriptor.rtcServers)),
 _videoCapture(std::move(descriptor.videoCapture)),
 _stateUpdated(std::move(descriptor.stateUpdated)),

@@ -52,6 +53,8 @@ _enableHighBitrateVideo(descriptor.config.enableHighBitrateVideo) {
     assert(_thread->IsCurrent());
     assert(_stateUpdated != nullptr);
     assert(_signalingDataEmitted != nullptr);
+
+    _preferredCodecs = descriptor.config.preferredVideoCodecs;

     _sendSignalingMessage = [=](const Message &message) {
         if (const auto prepared = _signaling.prepareForSending(message)) {
@@ -112,12 +115,19 @@ void Manager::start() {
             if (!strong) {
                 return;
             }
-            const auto mappedState = state.isReadyToSendData
-                ? State::Established
-                : State::Reconnecting;
+            State mappedState;
+            if (state.isFailed) {
+                mappedState = State::Failed;
+            } else {
+                mappedState = state.isReadyToSendData
+                    ? State::Established
+                    : State::Reconnecting;
+            }
             bool isFirstConnection = false;
             if (state.isReadyToSendData) {
                 if (!strong->_didConnectOnce) {
                     strong->_didConnectOnce = true;
                     isFirstConnection = true;
                 }
             }
             strong->_state = mappedState;

@@ -126,6 +136,10 @@ void Manager::start() {
             strong->_mediaManager->perform(RTC_FROM_HERE, [=](MediaManager *mediaManager) {
                 mediaManager->setIsConnected(state.isReadyToSendData);
             });
+
+            if (isFirstConnection) {
+                strong->sendInitialSignalingMessages();
+            }
         });
     },
     [=](DecryptedMessage &&message) {
@@ -152,7 +166,7 @@ void Manager::start() {
         });
     }));
     bool isOutgoing = _encryptionKey.isOutgoing;
-    _mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [weak, isOutgoing, thread, sendSignalingMessage, videoCapture = _videoCapture, localPreferredVideoAspectRatio = _localPreferredVideoAspectRatio, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated]() {
+    _mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [weak, isOutgoing, thread, sendSignalingMessage, videoCapture = _videoCapture, localPreferredVideoAspectRatio = _localPreferredVideoAspectRatio, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, preferredCodecs = _preferredCodecs]() {
         return new MediaManager(
             getMediaThread(),
             isOutgoing,

@@ -169,8 +183,12 @@ void Manager::start() {
         },
         signalBarsUpdated,
         localPreferredVideoAspectRatio,
-        enableHighBitrateVideo);
+        enableHighBitrateVideo,
+        preferredCodecs);
     }));
+    _networkManager->perform(RTC_FROM_HERE, [](NetworkManager *networkManager) {
+        networkManager->start();
+    });
     _mediaManager->perform(RTC_FROM_HERE, [](MediaManager *mediaManager) {
         mediaManager->start();
     });
@@ -208,6 +226,10 @@ void Manager::receiveMessage(DecryptedMessage &&message) {
         if (_remoteBatteryLevelIsLowUpdated) {
             _remoteBatteryLevelIsLowUpdated(remoteBatteryLevelIsLow->batteryLow);
         }
+    } else if (const auto remoteNetworkType = absl::get_if<RemoteNetworkTypeMessage>(data)) {
+        bool wasCurrentNetworkLowCost = calculateIsCurrentNetworkLowCost();
+        _remoteNetworkIsLowCost = remoteNetworkType->isLowCost;
+        updateIsCurrentNetworkLowCost(wasCurrentNetworkLowCost);
     } else {
         if (const auto videoParameters = absl::get_if<VideoParametersMessage>(data)) {
             float value = ((float)videoParameters->aspectRatio) / 1000.0;
@@ -249,4 +271,54 @@ void Manager::setIsLowBatteryLevel(bool isLowBatteryLevel) {
     _sendTransportMessage({ RemoteBatteryLevelIsLowMessage{ isLowBatteryLevel } });
 }

+void Manager::setIsLocalNetworkLowCost(bool isLocalNetworkLowCost) {
+    if (isLocalNetworkLowCost != _localNetworkIsLowCost) {
+        _networkManager->perform(RTC_FROM_HERE, [isLocalNetworkLowCost](NetworkManager *networkManager) {
+            networkManager->setIsLocalNetworkLowCost(isLocalNetworkLowCost);
+        });
+
+        bool wasCurrentNetworkLowCost = calculateIsCurrentNetworkLowCost();
+        _localNetworkIsLowCost = isLocalNetworkLowCost;
+        updateIsCurrentNetworkLowCost(wasCurrentNetworkLowCost);
+
+        switch (_protocolVersion) {
+            case ProtocolVersion::V1:
+                if (_didConnectOnce) {
+                    _sendTransportMessage({ RemoteNetworkTypeMessage{ isLocalNetworkLowCost } });
+                }
+                break;
+            default:
+                break;
+        }
+    }
+}
+
+void Manager::getNetworkStats(std::function<void (TrafficStats)> completion) {
+    _networkManager->perform(RTC_FROM_HERE, [completion = std::move(completion)](NetworkManager *networkManager) {
+        completion(networkManager->getNetworkStats());
+    });
+}
+
+bool Manager::calculateIsCurrentNetworkLowCost() const {
+    return _localNetworkIsLowCost && _remoteNetworkIsLowCost;
+}
+
+void Manager::updateIsCurrentNetworkLowCost(bool wasLowCost) {
+    bool isLowCost = calculateIsCurrentNetworkLowCost();
+    if (isLowCost != wasLowCost) {
+        _mediaManager->perform(RTC_FROM_HERE, [isLowCost](MediaManager *mediaManager) {
+            mediaManager->setIsCurrentNetworkLowCost(isLowCost);
+        });
+    }
+}
+
+void Manager::sendInitialSignalingMessages() {
+    switch (_protocolVersion) {
+        case ProtocolVersion::V1:
+            _sendTransportMessage({ RemoteNetworkTypeMessage{ _localNetworkIsLowCost } });
+            break;
+        default:
+            break;
+    }
+}
+
 } // namespace tgcalls
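Note: this block is the whole ProtocolVersion::V1 feature in one place: each peer classifies its own network (WiFi/Ethernet count as low-cost, see InstanceImpl::setNetworkType above), announces it in a RemoteNetworkTypeMessage on first connection and on every change, and the effective flag is the AND of both sides, which MediaManager uses to unlock the higher video bitrate. Condensed, with assumed values:

// Both peers on cheap networks -> allow high-bitrate video.
bool localLowCost  = true;   // this device: WiFi/Ethernet (InstanceImpl::setNetworkType)
bool remoteLowCost = false;  // peer: from RemoteNetworkTypeMessage, defaults to false
bool effective = localLowCost && remoteLowCost; // calculateIsCurrentNetworkLowCost()
// effective == false here, so getMaxVideoBitrate() stays at the 800 kbps tier.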
@@ -22,15 +22,21 @@ public:
     void setMuteOutgoingAudio(bool mute);
     void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
     void setIsLowBatteryLevel(bool isLowBatteryLevel);
+    void setIsLocalNetworkLowCost(bool isLocalNetworkLowCost);
+    void getNetworkStats(std::function<void(TrafficStats)> completion);

 private:
     void sendSignalingAsync(int delayMs, int cause);
     void receiveMessage(DecryptedMessage &&message);
+    bool calculateIsCurrentNetworkLowCost() const;
+    void updateIsCurrentNetworkLowCost(bool wasLowCost);
+    void sendInitialSignalingMessages();

     rtc::Thread *_thread;
     EncryptionKey _encryptionKey;
     EncryptedConnection _signaling;
     bool _enableP2P = false;
+    ProtocolVersion _protocolVersion = ProtocolVersion::V0;
     std::vector<RtcServer> _rtcServers;
     std::shared_ptr<VideoCaptureInterface> _videoCapture;
     std::function<void(State)> _stateUpdated;

@@ -47,6 +53,9 @@ private:
     bool _didConnectOnce = false;
     float _localPreferredVideoAspectRatio = 0.0f;
     bool _enableHighBitrateVideo = false;
+    std::vector<std::string> _preferredCodecs;
+    bool _localNetworkIsLowCost = false;
+    bool _remoteNetworkIsLowCost = false;

 };

@@ -58,7 +58,8 @@ MediaManager::MediaManager(
     std::function<void(Message &&)> sendTransportMessage,
     std::function<void(int)> signalBarsUpdated,
     float localPreferredVideoAspectRatio,
-    bool enableHighBitrateVideo) :
+    bool enableHighBitrateVideo,
+    std::vector<std::string> preferredCodecs) :
 _thread(thread),
 _eventLog(std::make_unique<webrtc::RtcEventLogNull>()),
 _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()),

@@ -103,7 +104,8 @@ _enableHighBitrateVideo(enableHighBitrateVideo) {

     _myVideoFormats = ComposeSupportedFormats(
         mediaDeps.video_encoder_factory->GetSupportedFormats(),
-        mediaDeps.video_decoder_factory->GetSupportedFormats());
+        mediaDeps.video_decoder_factory->GetSupportedFormats(),
+        preferredCodecs);

     mediaDeps.audio_processing = webrtc::AudioProcessingBuilder().Create();
     _mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps));

@@ -169,7 +171,7 @@ _enableHighBitrateVideo(enableHighBitrateVideo) {

     _videoChannel->SetInterface(_videoNetworkInterface.get());

-    adjustBitratePreferences();
+    adjustBitratePreferences(true);
 }

 void MediaManager::start() {
@@ -379,7 +381,7 @@ void MediaManager::configureSendingVideoIfNeeded() {

     codec.SetParam(cricket::kCodecParamMinBitrate, 64);
     codec.SetParam(cricket::kCodecParamStartBitrate, 400);
-    codec.SetParam(cricket::kCodecParamMaxBitrate, _enableHighBitrateVideo ? 1600 : 800);
+    codec.SetParam(cricket::kCodecParamMaxBitrate, _enableHighBitrateVideo ? 2000 : 800);

     cricket::VideoSendParameters videoSendParameters;
     videoSendParameters.codecs.push_back(codec);

@@ -408,7 +410,7 @@ void MediaManager::configureSendingVideoIfNeeded() {
         _videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing));
     }

-    adjustBitratePreferences();
+    adjustBitratePreferences(true);
 }

 void MediaManager::checkIsSendingVideoChanged(bool wasSending) {
@@ -432,46 +434,42 @@ void MediaManager::checkIsSendingVideoChanged(bool wasSending) {
         _videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr);
     }

-    adjustBitratePreferences();
+    adjustBitratePreferences(true);
 }

-void MediaManager::adjustBitratePreferences() {
+int MediaManager::getMaxVideoBitrate() const {
+    return (_enableHighBitrateVideo && _isLowCostNetwork) ? 2000000 : 800000;
+}
+
+void MediaManager::adjustBitratePreferences(bool resetStartBitrate) {
     if (computeIsSendingVideo()) {
         webrtc::BitrateConstraints preferences;
         preferences.min_bitrate_bps = 64000;
-        preferences.start_bitrate_bps = 400000;
-        preferences.max_bitrate_bps = _enableHighBitrateVideo ? 1600000 : 800000;
+        if (resetStartBitrate) {
+            preferences.start_bitrate_bps = 400000;
+        }
+        preferences.max_bitrate_bps = getMaxVideoBitrate();

         _call->GetTransportControllerSend()->SetSdpBitrateParameters(preferences);
-
-        webrtc::BitrateSettings settings;
-        settings.min_bitrate_bps = 64000;
-        settings.start_bitrate_bps = 400000;
-        settings.max_bitrate_bps = _enableHighBitrateVideo ? 1600000 : 800000;
-
-        _call->GetTransportControllerSend()->SetClientBitratePreferences(settings);
     } else {
         webrtc::BitrateConstraints preferences;
         if (_didConfigureVideo) {
             // After we have configured outgoing video, RTCP stops working for outgoing audio
             // TODO: investigate
             preferences.min_bitrate_bps = 16000;
-            preferences.start_bitrate_bps = 16000;
+            if (resetStartBitrate) {
+                preferences.start_bitrate_bps = 16000;
+            }
             preferences.max_bitrate_bps = 32000;
         } else {
             preferences.min_bitrate_bps = 8000;
-            preferences.start_bitrate_bps = 16000;
+            if (resetStartBitrate) {
+                preferences.start_bitrate_bps = 16000;
+            }
             preferences.max_bitrate_bps = 32000;
         }

         _call->GetTransportControllerSend()->SetSdpBitrateParameters(preferences);
-
-        webrtc::BitrateSettings settings;
-        settings.min_bitrate_bps = preferences.min_bitrate_bps;
-        settings.start_bitrate_bps = preferences.start_bitrate_bps;
-        settings.max_bitrate_bps = preferences.max_bitrate_bps;
-
-        _call->GetTransportControllerSend()->SetClientBitratePreferences(settings);
     }
 }

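Note: the maximum video bitrate now depends on the negotiated network cost, not just on the enableHighBitrateVideo flag, and resetStartBitrate separates initial configuration (true) from mid-call retuning (false) so a network-cost flip does not drag an established call back to the 400 kbps start bitrate. The resulting tiers:

// Mirrors MediaManager::getMaxVideoBitrate() from this commit:
int maxVideoBitrateBps(bool enableHighBitrateVideo, bool isLowCostNetwork) {
    return (enableHighBitrateVideo && isLowCostNetwork) ? 2000000  // 2 Mbps tier
                                                        : 800000;  // default tier
}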
@@ -586,6 +584,14 @@ void MediaManager::remoteVideoStateUpdated(VideoState videoState) {
     }
 }

+void MediaManager::setIsCurrentNetworkLowCost(bool isCurrentNetworkLowCost) {
+    if (_isLowCostNetwork != isCurrentNetworkLowCost) {
+        _isLowCostNetwork = isCurrentNetworkLowCost;
+        RTC_LOG(LS_INFO) << "MediaManager isLowCostNetwork updated: " << isCurrentNetworkLowCost ? 1 : 0;
+        adjustBitratePreferences(false);
+    }
+}
+
 MediaManager::NetworkInterfaceImpl::NetworkInterfaceImpl(MediaManager *mediaManager, bool isVideo) :
 _mediaManager(mediaManager),
 _isVideo(isVideo) {
@@ -42,7 +42,8 @@ public:
         std::function<void(Message &&)> sendTransportMessage,
         std::function<void(int)> signalBarsUpdated,
         float localPreferredVideoAspectRatio,
-        bool enableHighBitrateVideo);
+        bool enableHighBitrateVideo,
+        std::vector<std::string> preferredCodecs);
     ~MediaManager();

     void start();

@@ -53,6 +54,7 @@ public:
     void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
     void receiveMessage(DecryptedMessage &&message);
     void remoteVideoStateUpdated(VideoState videoState);
+    void setIsCurrentNetworkLowCost(bool isCurrentNetworkLowCost);

 private:
     struct SSRC {

@@ -85,8 +87,9 @@ private:
     void configureSendingVideoIfNeeded();
     void checkIsSendingVideoChanged(bool wasSending);
     bool videoCodecsNegotiated() const;

-    void adjustBitratePreferences();
+    int getMaxVideoBitrate() const;
+    void adjustBitratePreferences(bool resetStartBitrate);

     bool computeIsReceivingVideo() const;
     void checkIsReceivingVideoChanged(bool wasReceiving);

@@ -133,6 +136,7 @@ private:
     float _localPreferredVideoAspectRatio = 0.0f;
     float _preferredAspectRatio = 0.0f;
     bool _enableHighBitrateVideo = false;
+    bool _isLowCostNetwork = false;

     std::unique_ptr<MediaManager::NetworkInterfaceImpl> _audioNetworkInterface;
     std::unique_ptr<MediaManager::NetworkInterfaceImpl> _videoNetworkInterface;
@@ -264,6 +264,20 @@ bool Deserialize(RemoteBatteryLevelIsLowMessage &to, rtc::ByteBufferReader &read
     return true;
 }

+void Serialize(rtc::ByteBufferWriter &to, const RemoteNetworkTypeMessage &from, bool singleMessagePacket) {
+    to.WriteUInt8(from.isLowCost ? 1 : 0);
+}
+
+bool Deserialize(RemoteNetworkTypeMessage &to, rtc::ByteBufferReader &reader, bool singleMessagePacket) {
+    uint8_t value = 0;
+    if (!reader.ReadUInt8(&value)) {
+        RTC_LOG(LS_ERROR) << "Could not read isLowCost.";
+        return false;
+    }
+    to.isLowCost = (value != 0);
+    return true;
+}
+
 enum class TryResult : uint8_t {
     Success,
     TryNext,
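Note: the new message's body is a single byte (1 = low cost, 0 otherwise); kId = 10 identifies it on the wire and kRequiresAck = true routes it through the reliable resend path like the other state messages. A minimal round-trip sketch (assuming the generic packet code prepends the id byte, as for the other message types):

rtc::ByteBufferWriter writer;
RemoteNetworkTypeMessage out;
out.isLowCost = true;
Serialize(writer, out, /*singleMessagePacket=*/false); // body bytes: 0x01

rtc::ByteBufferReader reader(writer.Data(), writer.Length());
RemoteNetworkTypeMessage in;
bool ok = Deserialize(in, reader, false); // ok == true, in.isLowCost == true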
@@ -97,6 +97,13 @@ struct RemoteBatteryLevelIsLowMessage {
     bool batteryLow = false;
 };

+struct RemoteNetworkTypeMessage {
+    static constexpr uint8_t kId = 10;
+    static constexpr bool kRequiresAck = true;
+
+    bool isLowCost = false;
+};
+
 // To add a new message you should:
 // 1. Add the message struct.
 // 2. Add the message to the variant in Message struct.

@@ -112,7 +119,8 @@ struct Message {
     VideoDataMessage,
     UnstructuredDataMessage,
     VideoParametersMessage,
-    RemoteBatteryLevelIsLowMessage> data;
+    RemoteBatteryLevelIsLowMessage,
+    RemoteNetworkTypeMessage> data;
 };

 rtc::CopyOnWriteBuffer SerializeMessageWithSeq(
@@ -33,6 +33,8 @@ NetworkManager::NetworkManager(
     std::function<void(Message &&)> sendSignalingMessage,
     std::function<void(int delayMs, int cause)> sendTransportServiceAsync) :
 _thread(thread),
+_enableP2P(enableP2P),
+_rtcServers(rtcServers),
 _transport(
     EncryptedConnection::Type::Transport,
     encryptionKey,

@@ -43,24 +45,38 @@ _transportMessageReceived(std::move(transportMessageReceived)),
 _sendSignalingMessage(std::move(sendSignalingMessage)),
 _localIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::CreateRandomString(cricket::ICE_PWD_LENGTH)) {
     assert(_thread->IsCurrent());
+}

-    _socketFactory.reset(new rtc::BasicPacketSocketFactory(_thread));
+NetworkManager::~NetworkManager() {
+    assert(_thread->IsCurrent());

-    _networkManager = std::make_unique<rtc::BasicNetworkManager>();
-    _portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), nullptr, nullptr));
+    RTC_LOG(LS_INFO) << "NetworkManager::~NetworkManager()";
+
+    _transportChannel.reset();
+    _asyncResolverFactory.reset();
+    _portAllocator.reset();
+    _networkManager.reset();
+    _socketFactory.reset();
+}

-    uint32_t flags = cricket::PORTALLOCATOR_DISABLE_TCP;
-    if (!enableP2P) {
-        flags |= cricket::PORTALLOCATOR_DISABLE_UDP;
-        flags |= cricket::PORTALLOCATOR_DISABLE_STUN;
-    }
-    _portAllocator->set_flags(_portAllocator->flags() | flags);
-    _portAllocator->Initialize();
+void NetworkManager::start() {
+    _socketFactory.reset(new rtc::BasicPacketSocketFactory(_thread));

-    cricket::ServerAddresses stunServers;
-    std::vector<cricket::RelayServerConfig> turnServers;
+    _networkManager = std::make_unique<rtc::BasicNetworkManager>();
+    _portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), nullptr, nullptr));
+
+    uint32_t flags = cricket::PORTALLOCATOR_DISABLE_TCP;
+    if (!_enableP2P) {
+        flags |= cricket::PORTALLOCATOR_DISABLE_UDP;
+        flags |= cricket::PORTALLOCATOR_DISABLE_STUN;
+    }
+    _portAllocator->set_flags(_portAllocator->flags() | flags);
+    _portAllocator->Initialize();
+
+    cricket::ServerAddresses stunServers;
+    std::vector<cricket::RelayServerConfig> turnServers;

-    for (auto &server : rtcServers) {
+    for (auto &server : _rtcServers) {
         if (server.isTurn) {
             turnServers.push_back(cricket::RelayServerConfig(
                 rtc::SocketAddress(server.host, server.port),

@@ -74,16 +90,16 @@ _localIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::Cre
         }
     }

     _portAllocator->SetConfiguration(stunServers, turnServers, 2, webrtc::NO_PRUNE);

     _asyncResolverFactory = std::make_unique<webrtc::BasicAsyncResolverFactory>();
     _transportChannel.reset(new cricket::P2PTransportChannel("transport", 0, _portAllocator.get(), _asyncResolverFactory.get(), nullptr));

     cricket::IceConfig iceConfig;
     iceConfig.continual_gathering_policy = cricket::GATHER_CONTINUALLY;
+    iceConfig.prioritize_most_likely_candidate_pairs = true;
+    iceConfig.regather_on_failed_networks_interval = 8000;
     _transportChannel->SetIceConfig(iceConfig);

     cricket::IceParameters localIceParameters(
         _localIceParameters.ufrag,

@@ -91,30 +107,22 @@ _localIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::Cre
         false
     );

     _transportChannel->SetIceParameters(localIceParameters);
     _transportChannel->SetIceRole(_isOutgoing ? cricket::ICEROLE_CONTROLLING : cricket::ICEROLE_CONTROLLED);

     _transportChannel->SignalCandidateGathered.connect(this, &NetworkManager::candidateGathered);
     _transportChannel->SignalGatheringState.connect(this, &NetworkManager::candidateGatheringState);
     _transportChannel->SignalIceTransportStateChanged.connect(this, &NetworkManager::transportStateChanged);
     _transportChannel->SignalReadPacket.connect(this, &NetworkManager::transportPacketReceived);
+    _transportChannel->SignalNetworkRouteChanged.connect(this, &NetworkManager::transportRouteChanged);

     _transportChannel->MaybeStartGathering();

     _transportChannel->SetRemoteIceMode(cricket::ICEMODE_FULL);
-}

-NetworkManager::~NetworkManager() {
-    assert(_thread->IsCurrent());
-
-    RTC_LOG(LS_INFO) << "NetworkManager::~NetworkManager()";
-
-    _transportChannel.reset();
-    _asyncResolverFactory.reset();
-    _portAllocator.reset();
-    _networkManager.reset();
-    _socketFactory.reset();
+    _lastNetworkActivityMs = rtc::TimeMillis();
+
+    checkConnectionTimeout();
 }

 void NetworkManager::receiveSignalingMessage(DecryptedMessage &&message) {
@@ -143,6 +151,7 @@ uint32_t NetworkManager::sendMessage(const Message &message) {
     if (const auto prepared = _transport.prepareForSending(message)) {
         rtc::PacketOptions packetOptions;
         _transportChannel->SendPacket((const char *)prepared->bytes.data(), prepared->bytes.size(), packetOptions, 0);
+        addTrafficStats(prepared->bytes.size(), false);
         return prepared->counter;
     }
     return 0;

@@ -152,9 +161,45 @@ void NetworkManager::sendTransportService(int cause) {
     if (const auto prepared = _transport.prepareForSendingService(cause)) {
         rtc::PacketOptions packetOptions;
         _transportChannel->SendPacket((const char *)prepared->bytes.data(), prepared->bytes.size(), packetOptions, 0);
+        addTrafficStats(prepared->bytes.size(), false);
     }
 }

+void NetworkManager::setIsLocalNetworkLowCost(bool isLocalNetworkLowCost) {
+    _isLocalNetworkLowCost = isLocalNetworkLowCost;
+}
+
+TrafficStats NetworkManager::getNetworkStats() {
+    TrafficStats stats;
+    stats.bytesSentWifi = _trafficStatsWifi.outgoing;
+    stats.bytesReceivedWifi = _trafficStatsWifi.incoming;
+    stats.bytesSentMobile = _trafficStatsCellular.outgoing;
+    stats.bytesReceivedMobile = _trafficStatsCellular.incoming;
+    return stats;
+}
+
+void NetworkManager::checkConnectionTimeout() {
+    const auto weak = std::weak_ptr<NetworkManager>(shared_from_this());
+    _thread->PostDelayedTask(RTC_FROM_HERE, [weak]() {
+        auto strong = weak.lock();
+        if (!strong) {
+            return;
+        }
+
+        int64_t currentTimestamp = rtc::TimeMillis();
+        const int64_t maxTimeout = 20000;
+
+        if (strong->_lastNetworkActivityMs + maxTimeout < currentTimestamp) {
+            NetworkManager::State emitState;
+            emitState.isReadyToSendData = false;
+            emitState.isFailed = true;
+            strong->_stateUpdated(emitState);
+        }
+
+        strong->checkConnectionTimeout();
+    }, 1000);
+}
+
 void NetworkManager::candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) {
     assert(_thread->IsCurrent());
     _sendSignalingMessage({ CandidatesListMessage{ { 1, candidate }, _localIceParameters } });
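Note: checkConnectionTimeout is a self-rescheduling watchdog: every second it checks whether any packet arrived in the last 20 s and, if not, emits a state with isFailed = true (which Manager::start above maps to State::Failed). The weak_ptr guard is why NetworkManager now derives from std::enable_shared_from_this in the header change below: a delayed task can outlive the manager, and a failed lock() lets it bail out safely. The pattern, reduced to its core (Watchdog, checkOnce and postDelayed are hypothetical stand-ins):

class Watchdog : public std::enable_shared_from_this<Watchdog> {
    void schedule() {
        auto weak = std::weak_ptr<Watchdog>(shared_from_this());
        postDelayed([weak]() {                 // queue-delayed task, 1s cadence
            if (auto strong = weak.lock()) {   // null if the owner died meanwhile
                strong->checkOnce();
                strong->schedule();            // re-arm
            }
        }, 1000);
    }
    // checkOnce() / postDelayed() assumed provided by the surrounding code.
};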
@@ -188,6 +233,10 @@ void NetworkManager::transportReadyToSend(cricket::IceTransportInternal *transpo

 void NetworkManager::transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, int unused) {
     assert(_thread->IsCurrent());
+
+    _lastNetworkActivityMs = rtc::TimeMillis();
+
+    addTrafficStats(size, true);

     if (auto decrypted = _transport.handleIncomingPacket(bytes, size)) {
         if (_transportMessageReceived) {
@@ -212,4 +261,20 @@ void NetworkManager::transportRouteChanged(absl::optional<rtc::NetworkRoute> rou
     }
 }

+void NetworkManager::addTrafficStats(int64_t byteCount, bool isIncoming) {
+    if (_isLocalNetworkLowCost) {
+        if (isIncoming) {
+            _trafficStatsWifi.incoming += byteCount;
+        } else {
+            _trafficStatsWifi.outgoing += byteCount;
+        }
+    } else {
+        if (isIncoming) {
+            _trafficStatsCellular.incoming += byteCount;
+        } else {
+            _trafficStatsCellular.outgoing += byteCount;
+        }
+    }
+}
+
 } // namespace tgcalls
@@ -34,11 +34,17 @@ namespace tgcalls {

 struct Message;

-class NetworkManager : public sigslot::has_slots<> {
+class NetworkManager : public sigslot::has_slots<>, public std::enable_shared_from_this<NetworkManager> {
 public:
     struct State {
         bool isReadyToSendData = false;
+        bool isFailed = false;
     };

+    struct InterfaceTrafficStats {
+        int64_t incoming = 0;
+        int64_t outgoing = 0;
+    };
+
     NetworkManager(
         rtc::Thread *thread,

@@ -51,19 +57,26 @@ public:
         std::function<void(int delayMs, int cause)> sendTransportServiceAsync);
     ~NetworkManager();

+    void start();
     void receiveSignalingMessage(DecryptedMessage &&message);
     uint32_t sendMessage(const Message &message);
     void sendTransportService(int cause);
+    void setIsLocalNetworkLowCost(bool isLocalNetworkLowCost);
+    TrafficStats getNetworkStats();

 private:
+    void checkConnectionTimeout();
     void candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate);
     void candidateGatheringState(cricket::IceTransportInternal *transport);
     void transportStateChanged(cricket::IceTransportInternal *transport);
     void transportReadyToSend(cricket::IceTransportInternal *transport);
     void transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, int unused);
     void transportRouteChanged(absl::optional<rtc::NetworkRoute> route);
+    void addTrafficStats(int64_t byteCount, bool isIncoming);

     rtc::Thread *_thread = nullptr;
+    bool _enableP2P = false;
+    std::vector<RtcServer> _rtcServers;
     EncryptedConnection _transport;
     bool _isOutgoing = false;
     std::function<void(const NetworkManager::State &)> _stateUpdated;

@@ -78,6 +91,11 @@ private:

     PeerIceParameters _localIceParameters;
     absl::optional<PeerIceParameters> _remoteIceParameters;
+
+    bool _isLocalNetworkLowCost = false;
+    int64_t _lastNetworkActivityMs = 0;
+    InterfaceTrafficStats _trafficStatsWifi;
+    InterfaceTrafficStats _trafficStatsCellular;
 };

 } // namespace tgcalls
@@ -11,15 +11,17 @@ VideoCaptureInterfaceObject::VideoCaptureInterfaceObject(std::shared_ptr<Platfor
     _videoSource = PlatformInterface::SharedInstance()->makeVideoSource(Manager::getMediaThread(), MediaManager::getWorkerThread());
     _platformContext = platformContext;
     //this should outlive the capturer
-    _videoCapturer = PlatformInterface::SharedInstance()->makeVideoCapturer(_videoSource, _useFrontCamera, [this](VideoState state) {
-        if (this->_stateUpdated) {
-            this->_stateUpdated(state);
-        }
-    }, platformContext);
+    if (_videoSource) {
+        _videoCapturer = PlatformInterface::SharedInstance()->makeVideoCapturer(_videoSource, _useFrontCamera, [this](VideoState state) {
+            if (this->_stateUpdated) {
+                this->_stateUpdated(state);
+            }
+        }, platformContext, _videoCapturerResolution);
+    }
 }

 VideoCaptureInterfaceObject::~VideoCaptureInterfaceObject() {
-    if (_currentUncroppedSink != nullptr) {
+    if (_videoCapturer && _currentUncroppedSink != nullptr) {
         //_videoSource->RemoveSink(_currentSink.get());
         _videoCapturer->setUncroppedOutput(nullptr);
     }

@@ -30,30 +32,52 @@ void VideoCaptureInterfaceObject::switchCamera() {
     if (_videoCapturer && _currentUncroppedSink) {
         _videoCapturer->setUncroppedOutput(nullptr);
     }
-    _videoCapturer = PlatformInterface::SharedInstance()->makeVideoCapturer(_videoSource, _useFrontCamera, [this](VideoState state) {
-        if (this->_stateUpdated) {
-            this->_stateUpdated(state);
-        }
-    }, _platformContext);
-    if (_currentUncroppedSink) {
-        _videoCapturer->setUncroppedOutput(_currentUncroppedSink);
-    }
-    _videoCapturer->setState(_state);
+    if (_videoSource) {
+        _videoCapturer = PlatformInterface::SharedInstance()->makeVideoCapturer(_videoSource, _useFrontCamera, [this](VideoState state) {
+            if (this->_stateUpdated) {
+                this->_stateUpdated(state);
+            }
+        }, _platformContext, _videoCapturerResolution);
+    }
+    if (_videoCapturer) {
+        if (_currentUncroppedSink) {
+            _videoCapturer->setUncroppedOutput(_currentUncroppedSink);
+        }
+        _videoCapturer->setState(_state);
+    }
 }

 void VideoCaptureInterfaceObject::setState(VideoState state) {
     if (_state != state) {
         _state = state;
-        _videoCapturer->setState(state);
+        if (_videoCapturer) {
+            _videoCapturer->setState(state);
+        }
     }
 }

 void VideoCaptureInterfaceObject::setPreferredAspectRatio(float aspectRatio) {
-    _videoCapturer->setPreferredCaptureAspectRatio(aspectRatio);
+    if (_videoCapturer) {
+        if (aspectRatio > 0.01 && _videoCapturerResolution.first != 0 && _videoCapturerResolution.second != 0) {
+            float originalWidth = (float)_videoCapturerResolution.first;
+            float originalHeight = (float)_videoCapturerResolution.second;
+
+            float width = (originalWidth > aspectRatio * originalHeight)
+                ? int(std::round(aspectRatio * originalHeight))
+                : originalWidth;
+            float height = (originalWidth > aspectRatio * originalHeight)
+                ? originalHeight
+                : int(std::round(originalHeight / aspectRatio));
+
+            PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, (int)width, (int)height, 30);
+        }
+    }
 }

 void VideoCaptureInterfaceObject::setOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
-    _videoCapturer->setUncroppedOutput(sink);
+    if (_videoCapturer) {
+        _videoCapturer->setUncroppedOutput(sink);
+    }
     _currentUncroppedSink = sink;
 }

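Note: instead of forwarding a preferred aspect ratio to the capturer, the object now crops through adaptVideoSource, shrinking whichever dimension is too wide for the requested ratio and keeping the other at the captured size. A worked example with an assumed 1280x720 capture and a requested ratio of 1.0:

// Assumed capture resolution and requested aspect ratio:
std::pair<int, int> res = {1280, 720};
float ar = 1.0f;
// originalWidth (1280) > ar * originalHeight (720), so width is cropped:
int w = (int)std::round(ar * res.second); // 720
int h = res.second;                       // 720
// -> adaptVideoSource(source, 720, 720, 30): a centered 720x720 crop at 30 fps.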
@@ -28,6 +28,7 @@ public:
 private:
     std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentUncroppedSink;
     std::shared_ptr<PlatformContext> _platformContext;
+    std::pair<int, int> _videoCapturerResolution;
     std::unique_ptr<VideoCapturerInterface> _videoCapturer;
     std::function<void(VideoState)> _stateUpdated;
     bool _useFrontCamera = true;
@@ -155,8 +155,8 @@ onSignalBarsUpdated_(std::move(descriptor.signalBarsUpdated)) {

 InstanceImplLegacy::~InstanceImplLegacy() {
     if (controller_) {
-        stop();
+        stop([](FinalState state){});
     }
 }

 void InstanceImplLegacy::setNetworkType(NetworkType networkType) {

@@ -273,7 +273,7 @@ PersistentState InstanceImplLegacy::getPersistentState() {
     return {controller_->GetPersistentState()};
 }

-FinalState InstanceImplLegacy::stop() {
+void InstanceImplLegacy::stop(std::function<void(FinalState)> completion) {
     controller_->Stop();

     auto result = FinalState();

@@ -285,7 +285,7 @@ FinalState InstanceImplLegacy::stop() {
     delete controller_;
     controller_ = nullptr;

-    return result;
+    completion(result);
 }

 void InstanceImplLegacy::ControllerStateCallback(tgvoip::VoIPController *controller, int state) {

@@ -323,8 +323,10 @@ int InstanceImplLegacy::GetConnectionMaxLayer() {
     return tgvoip::VoIPController::GetConnectionMaxLayer();
 }

-std::string InstanceImplLegacy::GetVersion() {
-    return tgvoip::VoIPController::GetVersion();
+std::vector<std::string> InstanceImplLegacy::GetVersions() {
+    std::vector<std::string> result;
+    result.push_back("2.4.4");
+    return result;
 }

 template <>
@@ -14,7 +14,7 @@ public:
     ~InstanceImplLegacy();

     static int GetConnectionMaxLayer();
-    static std::string GetVersion();
+    static std::vector<std::string> GetVersions();

     void receiveSignalingData(const std::vector<uint8_t> &data) override;
     void setNetworkType(NetworkType networkType) override;

@@ -35,7 +35,7 @@ public:
     int64_t getPreferredRelayId() override;
     TrafficStats getTrafficStats() override;
     PersistentState getPersistentState() override;
-    FinalState stop() override;
+    void stop(std::function<void(FinalState)> completion) override;

 private:
     tgvoip::VoIPController *controller_;
@@ -27,7 +27,8 @@ public:
     virtual std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory() = 0;
     virtual bool supportsEncoding(const std::string &codecName) = 0;
     virtual rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) = 0;
-    virtual std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) = 0;
+    virtual void adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) = 0;
+    virtual std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) = 0;

 };

@@ -20,6 +20,14 @@

 namespace tgcalls {

+void AndroidInterface::configurePlatformAudio() {
+
+}
+
+float AndroidInterface::getDisplayAspectRatio() {
+    return 0;
+}
+
 std::unique_ptr<webrtc::VideoEncoderFactory> AndroidInterface::makeVideoEncoderFactory() {
     JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
     webrtc::ScopedJavaLocalRef<jclass> factory_class =

@@ -46,6 +54,10 @@ std::unique_ptr<webrtc::VideoDecoderFactory> AndroidInterface::makeVideoDecoderF
     return webrtc::JavaToNativeVideoDecoderFactory(env, factory_object.obj());
 }

+void AndroidInterface::adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) {
+
+}
+
 rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> AndroidInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) {
     JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
     _source = webrtc::CreateJavaVideoSource(env, signalingThread, false, false);

@@ -75,7 +87,7 @@ bool AndroidInterface::supportsEncoding(const std::string &codecName) {
     return codecName == cricket::kVp8CodecName;
 }

-std::unique_ptr<VideoCapturerInterface> AndroidInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) {
+std::unique_ptr<VideoCapturerInterface> AndroidInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) {
     return std::make_unique<VideoCapturerInterfaceImpl>(_source, useFrontCamera, stateUpdated, platformContext);
 }

@ -9,11 +9,14 @@ namespace tgcalls {

class AndroidInterface : public PlatformInterface {
public:
    void configurePlatformAudio() override;
    float getDisplayAspectRatio() override;
    std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory() override;
    std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory() override;
    bool supportsEncoding(const std::string &codecName) override;
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) override;
    std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) override;
    void adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) override;
    std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) override;

private:
    rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> _source;

@ -13,7 +13,8 @@ public:
    std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory() override;
    bool supportsEncoding(const std::string &codecName) override;
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) override;
    std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) override;
    virtual void adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) override;
    std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) override;

};

@ -18,6 +18,12 @@

namespace tgcalls {

static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
    webrtc::VideoTrackSourceProxy *proxy_source =
        static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
    return static_cast<webrtc::ObjCVideoTrackSource *>(proxy_source->internal());
}

void DarwinInterface::configurePlatformAudio() {
#ifdef WEBRTC_IOS
    [RTCAudioSession sharedInstance].useManualAudio = true;

@ -45,7 +51,7 @@ bool DarwinInterface::supportsEncoding(const std::string &codecName) {
        return [[AVAssetExportSession allExportPresets] containsObject:AVAssetExportPresetHEVCHighestQuality];
    }
#elif defined WEBRTC_MAC // WEBRTC_IOS
    if (@available(macOS 10.13, *)) {
    if (@available(macOS 10.14, *)) {
        return [[AVAssetExportSession allExportPresets] containsObject:AVAssetExportPresetHEVCHighestQuality];
    }
#endif // WEBRTC_IOS || WEBRTC_MAC

@ -54,11 +60,7 @@ bool DarwinInterface::supportsEncoding(const std::string &codecName) {
    } else if (codecName == cricket::kVp8CodecName) {
        return true;
    } else if (codecName == cricket::kVp9CodecName) {
#ifndef WEBRTC_IOS
        return true;
#else
        return false;
#endif
    }
    return false;
}

@ -68,8 +70,12 @@ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> DarwinInterface::makeVideo
    return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, objCVideoTrackSource);
}

std::unique_ptr<VideoCapturerInterface> DarwinInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) {
    return std::make_unique<VideoCapturerInterfaceImpl>(source, useFrontCamera, stateUpdated);
void DarwinInterface::adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) {
    getObjCVideoSource(videoSource)->OnOutputFormatRequest(width, height, fps);
}

std::unique_ptr<VideoCapturerInterface> DarwinInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) {
    return std::make_unique<VideoCapturerInterfaceImpl>(source, useFrontCamera, stateUpdated, outResolution);
}

std::unique_ptr<PlatformInterface> CreatePlatformInterface() {

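The revised makeVideoCapturer() reports the selected capture format through the outResolution out-parameter. A sketch under stated assumptions (`interface`, `source`, `stateUpdated`, and `context` stand in for objects from the surrounding call code and are not names from this commit):

// Illustrative call, not from this commit.
std::pair<int, int> outResolution(0, 0);
auto capturer = interface->makeVideoCapturer(source, /*useFrontCamera=*/true,
    stateUpdated, context, outResolution);
// outResolution now holds the dimensions of the chosen capture format;
// VideoCapturerInterfaceImpl swaps width and height on iOS, where the
// sensor reports landscape-oriented dimensions.
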
@ -9,7 +9,11 @@
 */

#import <Foundation/Foundation.h>
#ifdef WEBRTC_IOS
#import <UIKit/UIKit.h>
#else
#import <AppKit/AppKit.h>
#endif

#import "RTCMacros.h"
#import "RTCVideoRenderer.h"

@ -28,8 +32,13 @@ NS_ASSUME_NONNULL_BEGIN
 * bounds using OpenGLES 2.0 or OpenGLES 3.0.
 */
RTC_OBJC_EXPORT
NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.")
@interface GLVideoView : UIView <RTCVideoRenderer>
@interface GLVideoView :
#ifdef WEBRTC_IOS
UIView
#else
NSView
#endif
<RTCVideoRenderer>

@property(nonatomic, weak) id<RTCVideoViewDelegate> delegate;

50  TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.h  Normal file
@ -0,0 +1,50 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import <Foundation/Foundation.h>

#if !TARGET_OS_IPHONE

#import <AppKit/NSOpenGLView.h>
#import "api/media_stream_interface.h"
#import "RTCVideoRenderer.h"
#import "RTCVideoViewShading.h"

NS_ASSUME_NONNULL_BEGIN

@class GLVideoView;

@protocol GLVideoViewDelegate<RTCVideoViewDelegate> @end

@interface GLVideoView : NSView <RTCVideoRenderer>

@property(nonatomic, weak) id<GLVideoViewDelegate> delegate;

- (instancetype)initWithFrame:(NSRect)frameRect
                  pixelFormat:(NSOpenGLPixelFormat *)format
                       shader:(id<RTCVideoViewShading>)shader
    NS_DESIGNATED_INITIALIZER;

@property(nonatomic, nullable) NSValue *rotationOverride;

@property (nonatomic, readwrite) int internalOrientation;

- (std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)getSink;
- (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived;
- (void)internalSetOnOrientationUpdated:(void (^ _Nullable)(int))onOrientationUpdated;
- (void)internalSetOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated;
- (void)setVideoContentMode:(CALayerContentsGravity)mode;
- (void)setIsForceMirrored:(BOOL)forceMirrored;
@end

NS_ASSUME_NONNULL_END

#endif

491  TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.mm  Normal file
@ -0,0 +1,491 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#import "GLVideoViewMac.h"

#import "TGRTCCVPixelBuffer.h"

#import <GLKit/GLKit.h>

#import "RTCDefaultShader.h"
#import "RTCDisplayLinkTimer.h"
#import "RTCI420TextureCache.h"
#import "base/RTCLogging.h"
#import "base/RTCVideoFrame.h"
#import "base/RTCVideoFrameBuffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#include "sdk/objc/native/api/video_frame.h"
#import "rtc_base/time_utils.h"
#include "sdk/objc/native/src/objc_frame_buffer.h"

namespace {

static RTCVideoFrame *customToObjCVideoFrame(const webrtc::VideoFrame &frame, RTCVideoRotation &rotation) {
    rotation = RTCVideoRotation(frame.rotation());
    RTCVideoFrame *videoFrame =
        [[RTCVideoFrame alloc] initWithBuffer:webrtc::ToObjCVideoFrameBuffer(frame.video_frame_buffer())
                                     rotation:rotation
                                  timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
    videoFrame.timeStamp = frame.timestamp();

    return videoFrame;
}

class VideoRendererAdapterImpl : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
    VideoRendererAdapterImpl(void (^frameReceived)(CGSize, RTCVideoFrame *, RTCVideoRotation)) {
        _frameReceived = [frameReceived copy];
    }

    void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
        RTCVideoRotation rotation = RTCVideoRotation_0;
        RTCVideoFrame* videoFrame = customToObjCVideoFrame(nativeVideoFrame, rotation);

        CGSize currentSize = (videoFrame.rotation % 180 == 0) ? CGSizeMake(videoFrame.width, videoFrame.height) : CGSizeMake(videoFrame.height, videoFrame.width);

        if (_frameReceived) {
            _frameReceived(currentSize, videoFrame, rotation);
        }
    }

private:
    void (^_frameReceived)(CGSize, RTCVideoFrame *, RTCVideoRotation);
};

}

static CGSize scaleToFillSize(CGSize size, CGSize maxSize) {
    if (size.width < 1.0f) {
        size.width = 1.0f;
    }
    if (size.height < 1.0f) {
        size.height = 1.0f;
    }
    if (size.width < maxSize.width) {
        size.height = floor(maxSize.width * size.height / MAX(1.0f, size.width));
        size.width = maxSize.width;
    }
    if (size.height < maxSize.height) {
        size.width = floor(maxSize.height * size.width / MAX(1.0f, size.height));
        size.height = maxSize.height;
    }
    return size;
}

static CGSize aspectFilled(CGSize from, CGSize to) {
    CGFloat scale = MAX(from.width / MAX(1.0, to.width), from.height / MAX(1.0, to.height));
    return NSMakeSize(ceil(to.width * scale), ceil(to.height * scale));
}
static CGSize aspectFitted(CGSize from, CGSize to) {
    // Fitted scaling uses the smaller ratio (see the Swift reference below);
    // the original line used MAX, which made this identical to aspectFilled.
    CGFloat scale = MIN(from.width / MAX(1.0, to.width), from.height / MAX(1.0, to.height));
    return NSMakeSize(ceil(to.width * scale), ceil(to.height * scale));
}

/*

func aspectFilled(_ size: CGSize) -> CGSize {
    let scale = max(size.width / max(1.0, self.width), size.height / max(1.0, self.height))
    return CGSize(width: ceil(self.width * scale), height: ceil(self.height * scale))
}
func fittedToWidthOrSmaller(_ width: CGFloat) -> CGSize {
    let scale = min(1.0, width / max(1.0, self.width))
    return CGSize(width: floor(self.width * scale), height: floor(self.height * scale))
}

func aspectFitted(_ size: CGSize) -> CGSize {
    let scale = min(size.width / max(1.0, self.width), size.height / max(1.0, self.height))
    return CGSize(width: ceil(self.width * scale), height: ceil(self.height * scale))
}
*/

#if !TARGET_OS_IPHONE

@interface OpenGLVideoView : NSOpenGLView
@property(atomic, strong) RTCVideoFrame *videoFrame;
@property(atomic, strong) RTCI420TextureCache *i420TextureCache;

- (void)drawFrame;
- (instancetype)initWithFrame:(NSRect)frame
                  pixelFormat:(NSOpenGLPixelFormat *)format
                       shader:(id<RTCVideoViewShading>)shader;
@end

static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink,
                                   const CVTimeStamp *now,
                                   const CVTimeStamp *outputTime,
                                   CVOptionFlags flagsIn,
                                   CVOptionFlags *flagsOut,
                                   void *displayLinkContext) {
    OpenGLVideoView *view =
        (__bridge OpenGLVideoView *)displayLinkContext;
    [view drawFrame];
    return kCVReturnSuccess;
}

@implementation OpenGLVideoView {
    CVDisplayLinkRef _displayLink;
    RTCVideoFrame * _lastDrawnFrame;
    id<RTCVideoViewShading> _shader;

    int64_t _lastDrawnFrameTimeStampNs;
    void (^_onFirstFrameReceived)(float);
    bool _firstFrameReceivedReported;
}

@synthesize videoFrame = _videoFrame;
@synthesize i420TextureCache = _i420TextureCache;

- (instancetype)initWithFrame:(NSRect)frame
                  pixelFormat:(NSOpenGLPixelFormat *)format
                       shader:(id<RTCVideoViewShading>)shader {
    if (self = [super initWithFrame:frame pixelFormat:format]) {
        self->_shader = shader;
    }
    return self;
}

- (void)reshape {
    [super reshape];
    NSRect frame = [self frame];
    [self ensureGLContext];
    CGLLockContext([[self openGLContext] CGLContextObj]);
    glViewport(0, 0, frame.size.width, frame.size.height);
    CGLUnlockContext([[self openGLContext] CGLContextObj]);
}

- (void)lockFocus {
    NSOpenGLContext *context = [self openGLContext];
    [super lockFocus];
    if ([context view] != self) {
        [context setView:self];
    }
    [context makeCurrentContext];
}

- (void)prepareOpenGL {
    [super prepareOpenGL];
    [self ensureGLContext];
    glDisable(GL_DITHER);
    [self setupDisplayLink];
}

- (void)clearGLContext {
    [self ensureGLContext];
    self.i420TextureCache = nil;
    [super clearGLContext];
}

- (void)drawRect:(NSRect)rect {
    [self drawFrame];
}

- (void)drawFrame {
    RTCVideoFrame *frame = self.videoFrame;
    if (!frame || frame == _lastDrawnFrame) {
        return;
    }
    // This method may be called from CVDisplayLink callback which isn't on the
    // main thread so we have to lock the GL context before drawing.
    NSOpenGLContext *context = [self openGLContext];
    CGLLockContext([context CGLContextObj]);

    [self ensureGLContext];
    glClear(GL_COLOR_BUFFER_BIT);

    // Rendering native CVPixelBuffer is not supported on OS X.
    // TODO(magjed): Add support for NV12 texture cache on OS X.
    frame = [frame newI420VideoFrame];
    if (!self.i420TextureCache) {
        self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context];
    }
    RTCVideoRotation rotation = frame.rotation;

    RTCI420TextureCache *i420TextureCache = self.i420TextureCache;
    if (i420TextureCache) {
        [i420TextureCache uploadFrameToTextures:frame];
        [_shader applyShadingForFrameWithWidth:frame.width
                                        height:frame.height
                                      rotation:rotation
                                        yPlane:i420TextureCache.yTexture
                                        uPlane:i420TextureCache.uTexture
                                        vPlane:i420TextureCache.vTexture];
        [context flushBuffer];
        _lastDrawnFrame = frame;
    }
    CGLUnlockContext([context CGLContextObj]);

    if (!_firstFrameReceivedReported && _onFirstFrameReceived) {
        _firstFrameReceivedReported = true;
        float aspectRatio = (float)frame.width / (float)frame.height;
        dispatch_async(dispatch_get_main_queue(), ^{
            self->_onFirstFrameReceived(aspectRatio);
        });
    }
}

- (void)setupDisplayLink {
    if (_displayLink) {
        return;
    }
    // Synchronize buffer swaps with vertical refresh rate.
    GLint swapInt = 1;
    [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];

    // Create display link.
    CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
    CVDisplayLinkSetOutputCallback(_displayLink,
                                   &OnDisplayLinkFired,
                                   (__bridge void *)self);
    // Set the display link for the current renderer.
    CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
    CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
    CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(
        _displayLink, cglContext, cglPixelFormat);
    CVDisplayLinkStart(_displayLink);
}

-(void)setFrameOrigin:(NSPoint)newOrigin {
    [super setFrameOrigin:newOrigin];
}

- (void)teardownDisplayLink {
    if (!_displayLink) {
        return;
    }
    CVDisplayLinkRelease(_displayLink);
    _displayLink = NULL;
}

- (void)ensureGLContext {
    NSOpenGLContext* context = [self openGLContext];
    NSAssert(context, @"context shouldn't be nil");
    if ([NSOpenGLContext currentContext] != context) {
        [context makeCurrentContext];
    }
}

- (void)dealloc {
    [self teardownDisplayLink];
}

- (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived {
    _onFirstFrameReceived = [onFirstFrameReceived copy];
    _firstFrameReceivedReported = false;
}

@end

@interface GLVideoView ()
@property(nonatomic, strong) OpenGLVideoView *glView;
@end

@implementation GLVideoView {
    CGSize _currentSize;

    std::shared_ptr<VideoRendererAdapterImpl> _sink;

    void (^_onOrientationUpdated)(int);
    void (^_onIsMirroredUpdated)(bool);

    bool _didSetShouldBeMirrored;
    bool _shouldBeMirrored;
    bool _forceMirrored;
}

@synthesize delegate = _delegate;

-(instancetype)initWithFrame:(NSRect)frameRect {
    NSOpenGLPixelFormatAttribute attributes[] = {
        NSOpenGLPFADoubleBuffer,
        NSOpenGLPFADepthSize, 24,
        NSOpenGLPFAOpenGLProfile,
        NSOpenGLProfileVersion3_2Core,
        0
    };
    NSOpenGLPixelFormat* pixelFormat =
        [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];
    return [self initWithFrame:frameRect pixelFormat: pixelFormat];
}

- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format {
    return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]];
}

- (instancetype)initWithFrame:(NSRect)frame
                  pixelFormat:(NSOpenGLPixelFormat *)format
                       shader:(id<RTCVideoViewShading>)shader {
    if (self = [super initWithFrame:frame]) {
        _glView = [[OpenGLVideoView alloc] initWithFrame:frame pixelFormat:format shader:shader];
        _glView.wantsLayer = YES;
        self.layerContentsRedrawPolicy = NSViewLayerContentsRedrawDuringViewResize;
        _glView.layerContentsRedrawPolicy = NSViewLayerContentsRedrawDuringViewResize;

        [self addSubview:_glView];

        __weak GLVideoView *weakSelf = self;

        self.wantsLayer = YES;

        _sink.reset(new VideoRendererAdapterImpl(^(CGSize size, RTCVideoFrame *videoFrame, RTCVideoRotation rotation) {
            dispatch_async(dispatch_get_main_queue(), ^{
                __strong GLVideoView *strongSelf = weakSelf;
                if (strongSelf == nil) {
                    return;
                }
                if (!CGSizeEqualToSize(size, strongSelf->_currentSize)) {
                    strongSelf->_currentSize = size;
                    [strongSelf setSize:size];
                }

                int mappedValue = 0;
                switch (rotation) {
                    case RTCVideoRotation_90:
                        mappedValue = 0;
                        break;
                    case RTCVideoRotation_180:
                        mappedValue = 1;
                        break;
                    case RTCVideoRotation_270:
                        mappedValue = 2;
                        break;
                    default:
                        mappedValue = 0;
                        break;
                }
                [strongSelf setInternalOrientation:mappedValue];

                [strongSelf renderFrame:videoFrame];
            });
        }));
    }
    return self;
}

- (CALayerContentsGravity)videoContentMode {
    return self.glView.layer.contentsGravity;
}

- (void)setVideoContentMode:(CALayerContentsGravity)mode {
    self.glView.layer.contentsGravity = mode;
    [self setNeedsLayout:YES];
}

-(void)layout {
    [super layout];

    if (self.bounds.size.width > 0.0f && _currentSize.width > 0) {
        NSSize size = _currentSize;
        NSSize frameSize = self.frame.size;
        if (self.glView.layer.contentsGravity == kCAGravityResizeAspectFill) {
            size = aspectFitted(frameSize, _currentSize);
        } else {
            size = aspectFilled(frameSize, _currentSize);
        }
        _glView.frame = CGRectMake(floor((self.bounds.size.width - size.width) / 2.0), floor((self.bounds.size.height - size.height) / 2.0), size.width, size.height);
    }

    if (_shouldBeMirrored || _forceMirrored) {
        self.glView.layer.anchorPoint = NSMakePoint(1, 0);
        self.glView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1);
    } else {
        self.glView.layer.anchorPoint = NSMakePoint(0, 0);
        self.glView.layer.affineTransform = CGAffineTransformIdentity;
    }
}

- (void)setSize:(CGSize)size {
    [self.delegate videoView:self didChangeVideoSize:size];
    [self setNeedsLayout:YES];
}

- (void)renderFrame:(RTCVideoFrame *)videoFrame {
    self.glView.videoFrame = videoFrame;

    if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
        RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
        if ([buffer isKindOfClass:[TGRTCCVPixelBuffer class]]) {
            bool shouldBeMirrored = ((TGRTCCVPixelBuffer *)buffer).shouldBeMirrored;
            if (shouldBeMirrored != _shouldBeMirrored) {
                _shouldBeMirrored = shouldBeMirrored;
                if (shouldBeMirrored || _forceMirrored) {
                    self.glView.layer.anchorPoint = NSMakePoint(1, 0);
                    self.glView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1);
                } else {
                    self.glView.layer.anchorPoint = NSMakePoint(0, 0);
                    self.glView.layer.affineTransform = CGAffineTransformIdentity;
                }

                // Notify inside the change check: the original second
                // `shouldBeMirrored != _shouldBeMirrored` test could never be
                // true after the assignment above.
                if (_didSetShouldBeMirrored) {
                    if (_onIsMirroredUpdated) {
                        _onIsMirroredUpdated(_shouldBeMirrored);
                    }
                } else {
                    _didSetShouldBeMirrored = true;
                }
            }
        }
    }
}

#pragma mark - Private

- (std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)getSink {
    assert([NSThread isMainThread]);

    return _sink;
}

- (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived {
    [self.glView setOnFirstFrameReceived:onFirstFrameReceived];
}

- (void)setInternalOrientation:(int)internalOrientation {
    _internalOrientation = internalOrientation;
    if (_onOrientationUpdated) {
        _onOrientationUpdated(internalOrientation);
    }
}

- (void)internalSetOnOrientationUpdated:(void (^ _Nullable)(int))onOrientationUpdated {
    _onOrientationUpdated = [onOrientationUpdated copy];
}

- (void)internalSetOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated {
    // The original body was empty, so the _onIsMirroredUpdated ivar used by
    // renderFrame: was never set; store the block as the Metal view does.
    _onIsMirroredUpdated = [onIsMirroredUpdated copy];
}

- (void)setIsForceMirrored:(BOOL)forceMirrored {
    _forceMirrored = forceMirrored;
    [self setNeedsLayout:YES];
}

@end

#endif // !TARGET_OS_IPHONE

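For reference, the fill-versus-fit scaling used by the layout code above reduces to a single operator choice. A plain C++ restatement (types and names are illustrative, not from the file): fill scales by the larger ratio and may crop, fit scales by the smaller ratio and may letterbox.

#include <algorithm>
#include <cmath>

struct Size { double width; double height; };

// Cover the target; overflow is cropped by the view.
Size aspectFilled(Size from, Size to) {
    double scale = std::max(from.width / std::max(1.0, to.width),
                            from.height / std::max(1.0, to.height));
    return { std::ceil(to.width * scale), std::ceil(to.height * scale) };
}

// Stay inside the target; the remainder is letterboxed.
Size aspectFitted(Size from, Size to) {
    double scale = std::min(from.width / std::max(1.0, to.width),
                            from.height / std::max(1.0, to.height));
    return { std::ceil(to.width * scale), std::ceil(to.height * scale) };
}
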
@ -154,13 +154,18 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
@interface VideoCameraCapturer () <AVCaptureVideoDataOutputSampleBufferDelegate> {
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source;

    // Live on main thread.
    bool _isFrontCamera;

    dispatch_queue_t _frameQueue;

    // Live on RTCDispatcherTypeCaptureSession.
    AVCaptureDevice *_currentDevice;
    BOOL _hasRetriedOnFatalError;
    BOOL _isRunning;
    BOOL _willBeRunning;

    // Live on RTCDispatcherTypeCaptureSession and main thread.
    std::atomic<bool> _willBeRunning;

    AVCaptureVideoDataOutput *_videoDataOutput;
    AVCaptureSession *_captureSession;

@ -170,16 +175,21 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
    UIDeviceOrientation _orientation;
    bool _rotationLock;

    // Live on mainThread.
    void (^_isActiveUpdated)(bool);
    bool _isActiveValue;
    bool _inForegroundValue;
    bool _isPaused;

    // Live on frameQueue and main thread.
    std::atomic<bool> _isPaused;

    // Live on frameQueue.
    float _aspectRatio;
    std::vector<uint8_t> _croppingBuffer;
    std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _uncroppedSink;

    int _warmupFrameCount;
    // Live on frameQueue and RTCDispatcherTypeCaptureSession.
    std::atomic<int> _warmupFrameCount;
}

@end

@ -292,18 +302,22 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
}

- (void)setUncroppedSink:(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)sink {
    _uncroppedSink = sink;
    dispatch_async(self.frameQueue, ^{
        _uncroppedSink = sink;
    });
}

- (void)setPreferredCaptureAspectRatio:(float)aspectRatio {
    _aspectRatio = aspectRatio;
    dispatch_async(self.frameQueue, ^{
        _aspectRatio = aspectRatio;
    });
}

- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps
             completionHandler:(nullable void (^)(NSError *))completionHandler {
    _willBeRunning = YES;
    _willBeRunning = true;
    [RTCDispatcher
        dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                      block:^{

@ -323,7 +337,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
                          if (completionHandler) {
                              completionHandler(error);
                          }
                          _willBeRunning = NO;
                          _willBeRunning = false;
                          return;
                      }
                      [self reconfigureCaptureSessionInput];

@ -340,7 +354,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
}

- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
    _willBeRunning = NO;
    _willBeRunning = false;
    [RTCDispatcher
        dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                      block:^{

@ -20,7 +20,7 @@
- (void)setIsEnabled:(bool)isEnabled;
- (void)setPreferredCaptureAspectRatio:(float)aspectRatio;
- (void)setUncroppedSink:(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)sink;

- (BOOL)deviceIsCaptureCompitable:(AVCaptureDevice *)device;

@end
#endif //WEBRTC_MAC

@ -157,9 +157,13 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr

    dispatch_queue_t _frameQueue;
    AVCaptureDevice *_currentDevice;

    // Live on RTCDispatcherTypeCaptureSession.
    BOOL _hasRetriedOnFatalError;
    BOOL _isRunning;
    BOOL _willBeRunning;

    // Live on RTCDispatcherTypeCaptureSession and main thread.
    std::atomic<bool> _willBeRunning;

    AVCaptureVideoDataOutput *_videoDataOutput;
    AVCaptureSession *_captureSession;

@ -171,15 +175,21 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
    FourCharCode _outputPixelFormat;
    RTCVideoRotation _rotation;

    // Live on mainThread.
    void (^_isActiveUpdated)(bool);
    bool _isActiveValue;
    bool _inForegroundValue;
    bool _isPaused;
    int _skippedFrame;

    // Live on frameQueue and main thread.
    std::atomic<bool> _isPaused;
    std::atomic<int> _skippedFrame;

    // Live on frameQueue;
    float _aspectRatio;
    std::vector<uint8_t> _croppingBuffer;
    std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _uncroppedSink;

    int _warmupFrameCount;
}

@ -197,6 +207,8 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
    _isPaused = false;
    _skippedFrame = 0;
    _rotation = RTCVideoRotation_0;

    _warmupFrameCount = 100;

    if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) {
        return nil;

@ -211,7 +223,21 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
}

+ (NSArray<AVCaptureDevice *> *)captureDevices {
    return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice * defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSMutableArray<AVCaptureDevice *> * devices = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] mutableCopy];

    [devices insertObject:defaultDevice atIndex:0];

    return devices;
}

- (BOOL)deviceIsCaptureCompitable:(AVCaptureDevice *)device {
    if (![device isConnected] || [device isSuspended]) {
        return NO;
    }
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil];

    return [_captureSession canAddInput:input];
}

+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {

@ -264,11 +290,15 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr

- (void)setUncroppedSink:(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)sink {
    _uncroppedSink = sink;
    dispatch_async(self.frameQueue, ^{
        _uncroppedSink = sink;
    });
}

- (void)setPreferredCaptureAspectRatio:(float)aspectRatio {
    _aspectRatio = aspectRatio;
    dispatch_async(self.frameQueue, ^{
        _aspectRatio = MAX(0.7, aspectRatio);
    });
}

- (void)updateIsActiveValue {

@ -286,7 +316,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps
             completionHandler:(nullable void (^)(NSError *))completionHandler {
    _willBeRunning = YES;
    _willBeRunning = true;
    [RTCDispatcher
        dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                      block:^{

@ -302,7 +332,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
                          if (completionHandler) {
                              completionHandler(error);
                          }
                          self->_willBeRunning = NO;
                          self->_willBeRunning = false;
                          return;
                      }
                      [self reconfigureCaptureSessionInput];

@ -318,7 +348,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
}

- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
    _willBeRunning = NO;
    _willBeRunning = false;
    [RTCDispatcher
        dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                      block:^{

@ -344,6 +374,12 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
                   fromConnection:(AVCaptureConnection *)connection {
    NSParameterAssert(captureOutput == _videoDataOutput);

    int minWarmupFrameCount = 12;
    _warmupFrameCount++;
    if (_warmupFrameCount < minWarmupFrameCount) {
        return;
    }

    if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
        !CMSampleBufferDataIsReady(sampleBuffer)) {
        return;

@ -374,6 +410,8 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr

    rtcPixelBuffer = [[TGRTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer adaptedWidth:width adaptedHeight:height cropWidth:width cropHeight:height cropX:left cropY:top];

    rtcPixelBuffer.shouldBeMirrored = YES;

    CVPixelBufferRef outputPixelBufferRef = NULL;
    OSType pixelFormat = CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer);
    CVPixelBufferCreate(NULL, width, height, pixelFormat, NULL, &outputPixelBufferRef);

@ -384,6 +422,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
    }
    if ([rtcPixelBuffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:_croppingBuffer.data()]) {
        rtcPixelBuffer = [[TGRTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef];
        rtcPixelBuffer.shouldBeMirrored = YES;
    }
    CVPixelBufferRelease(outputPixelBufferRef);
}

@ -480,6 +519,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
    if (!self->_hasRetriedOnFatalError) {
        RTCLogWarning(@"Attempting to recover from fatal capture error.");
        [self handleNonFatalError];
        self->_warmupFrameCount = 0;
        self->_hasRetriedOnFatalError = YES;
    } else {
        RTCLogError(@"Previous fatal error recovery failed.");

@ -492,6 +532,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
                      block:^{
                          RTCLog(@"Restarting capture session after error.");
                          if (self->_isRunning) {
                              self->_warmupFrameCount = 0;
                              [self->_captureSession startRunning];
                          }
                      }];

@ -504,6 +545,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
                      block:^{
                          if (self->_isRunning && !self->_captureSession.isRunning) {
                              RTCLog(@"Restarting capture session on active.");
                              self->_warmupFrameCount = 0;
                              [self->_captureSession startRunning];
                          }
                      }];

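The ivar hunks above migrate flags that cross dispatch queues from plain BOOL/int to std::atomic, so concurrent reads and writes are well-defined without extra locking. A compact illustration of the pattern (not project code; the struct and helper names are assumptions):

#include <atomic>

struct CaptureFlags {
    std::atomic<bool> willBeRunning{false}; // main thread + capture-session queue
    std::atomic<bool> isPaused{false};      // main thread + frame queue
    std::atomic<int> warmupFrameCount{0};   // frame queue + capture-session queue
};

// Mirrors the warm-up logic above: after a (re)start resets the counter,
// drop the first few frames so the sensor has settled before delivery.
bool shouldDropFrame(CaptureFlags &flags) {
    const int minWarmupFrameCount = 12;
    return ++flags.warmupFrameCount < minWarmupFrameCount;
}
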
@ -16,7 +16,7 @@ namespace tgcalls {

class VideoCapturerInterfaceImpl : public VideoCapturerInterface {
public:
    VideoCapturerInterfaceImpl(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated);
    VideoCapturerInterfaceImpl(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::pair<int, int> &outResolution);
    ~VideoCapturerInterfaceImpl() override;

    void setState(VideoState state) override;

@ -32,6 +32,28 @@

#import "VideoCaptureInterface.h"

@interface VideoCapturerInterfaceImplSourceDescription : NSObject

@property (nonatomic, readonly) bool isFrontCamera;
@property (nonatomic, strong, readonly, nonnull) AVCaptureDevice *device;
@property (nonatomic, strong, readonly, nonnull) AVCaptureDeviceFormat *format;

@end

@implementation VideoCapturerInterfaceImplSourceDescription

- (instancetype)initWithIsFrontCamera:(bool)isFrontCamera device:(AVCaptureDevice * _Nonnull)device format:(AVCaptureDeviceFormat * _Nonnull)format {
    self = [super init];
    if (self != nil) {
        _isFrontCamera = isFrontCamera;
        _device = device;
        _format = format;
    }
    return self;
}

@end

@interface VideoCapturerInterfaceImplReference : NSObject {
    VideoCameraCapturer *_videoCapturer;
}

@ -40,90 +62,118 @@

@implementation VideoCapturerInterfaceImplReference

- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source useFrontCamera:(bool)useFrontCamera isActiveUpdated:(void (^)(bool))isActiveUpdated {
+ (AVCaptureDevice *)selectCaptureDevice:(bool)useFrontCamera {
    AVCaptureDevice *selectedCamera = nil;

#ifdef WEBRTC_IOS
    AVCaptureDevice *frontCamera = nil;
    AVCaptureDevice *backCamera = nil;
    for (AVCaptureDevice *device in [VideoCameraCapturer captureDevices]) {
        if (device.position == AVCaptureDevicePositionFront) {
            frontCamera = device;
        } else if (device.position == AVCaptureDevicePositionBack) {
            backCamera = device;
        }
    }
    if (useFrontCamera && frontCamera != nil) {
        selectedCamera = frontCamera;
    } else {
        selectedCamera = backCamera;
    }
#else
    NSArray<AVCaptureDevice *> *devices = [VideoCameraCapturer captureDevices];
    for (int i = 0; i < devices.count; i++) {
        if ([_videoCapturer deviceIsCaptureCompitable:devices[i]]) {
            selectedCamera = devices[i];
            break;
        }
    }
#endif

    return selectedCamera;
}

+ (AVCaptureDeviceFormat *)selectCaptureDeviceFormatForDevice:(AVCaptureDevice *)selectedCamera {
    NSArray<AVCaptureDeviceFormat *> *sortedFormats = [[VideoCameraCapturer supportedFormatsForDevice:selectedCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, AVCaptureDeviceFormat *rhs) {
        int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width;
        int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width;
        return width1 < width2 ? NSOrderedAscending : NSOrderedDescending;
    }];

    AVCaptureDeviceFormat *bestFormat = sortedFormats.firstObject;

    bool didSelectPreferredFormat = false;
#ifdef WEBRTC_IOS
    for (AVCaptureDeviceFormat *format in sortedFormats) {
        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
        if (dimensions.width == 1280 && dimensions.height == 720) {
            if (format.videoFieldOfView > 60.0f && format.videoSupportedFrameRateRanges.lastObject.maxFrameRate == 30) {
                didSelectPreferredFormat = true;
                bestFormat = format;
                break;
            }
        }
    }
#endif
    if (!didSelectPreferredFormat) {
        for (AVCaptureDeviceFormat *format in sortedFormats) {
            CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
            if (dimensions.width >= 1000 || dimensions.height >= 1000) {
                bestFormat = format;
                break;
            }
        }
    }

    if (bestFormat == nil) {
        assert(false);
        return nil;
    }

    AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) {
        if (lhs.maxFrameRate < rhs.maxFrameRate) {
            return NSOrderedAscending;
        } else {
            return NSOrderedDescending;
        }
    }] lastObject];

    if (frameRateRange == nil) {
        assert(false);
        return nil;
    }

    return bestFormat;
}

+ (VideoCapturerInterfaceImplSourceDescription *)selectCapturerDescription:(bool)useFrontCamera {
    AVCaptureDevice *selectedCamera = [VideoCapturerInterfaceImplReference selectCaptureDevice:useFrontCamera];

    if (selectedCamera == nil) {
        return nil;
    }

    AVCaptureDeviceFormat *bestFormat = [VideoCapturerInterfaceImplReference selectCaptureDeviceFormatForDevice:selectedCamera];

    if (bestFormat == nil) {
        return nil;
    }

    return [[VideoCapturerInterfaceImplSourceDescription alloc] initWithIsFrontCamera:useFrontCamera device:selectedCamera format:bestFormat];
}

- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source sourceDescription:(VideoCapturerInterfaceImplSourceDescription *)sourceDescription isActiveUpdated:(void (^)(bool))isActiveUpdated {
    self = [super init];
    if (self != nil) {
        assert([NSThread isMainThread]);

#ifdef WEBRTC_IOS
        _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:source useFrontCamera:useFrontCamera isActiveUpdated:isActiveUpdated];
        _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:source useFrontCamera:sourceDescription.isFrontCamera isActiveUpdated:isActiveUpdated];
#else
        _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:source isActiveUpdated:isActiveUpdated];
#endif
        AVCaptureDevice *selectedCamera = nil;

#ifdef WEBRTC_IOS
        AVCaptureDevice *frontCamera = nil;
        AVCaptureDevice *backCamera = nil;
        for (AVCaptureDevice *device in [VideoCameraCapturer captureDevices]) {
            if (device.position == AVCaptureDevicePositionFront) {
                frontCamera = device;
            } else if (device.position == AVCaptureDevicePositionBack) {
                backCamera = device;
            }
        }
        if (useFrontCamera && frontCamera != nil) {
            selectedCamera = frontCamera;
        } else {
            selectedCamera = backCamera;
        }
#else
        selectedCamera = [VideoCameraCapturer captureDevices].firstObject;
#endif
        // NSLog(@"%@", selectedCamera);
        if (selectedCamera == nil) {
            return nil;
        }

        NSArray<AVCaptureDeviceFormat *> *sortedFormats = [[VideoCameraCapturer supportedFormatsForDevice:selectedCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, AVCaptureDeviceFormat *rhs) {
            int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width;
            int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width;
            return width1 < width2 ? NSOrderedAscending : NSOrderedDescending;
        }];

        AVCaptureDeviceFormat *bestFormat = sortedFormats.firstObject;

        bool didSelectPreferredFormat = false;
#ifdef WEBRTC_IOS
        for (AVCaptureDeviceFormat *format in sortedFormats) {
            CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
            if (dimensions.width == 1280 && dimensions.height == 720) {
                if (format.videoFieldOfView > 60.0f && format.videoSupportedFrameRateRanges.lastObject.maxFrameRate == 30) {
                    didSelectPreferredFormat = true;
                    bestFormat = format;
                    break;
                }
            }
        }
#endif
        if (!didSelectPreferredFormat) {
            for (AVCaptureDeviceFormat *format in sortedFormats) {
                CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
                if (dimensions.width >= 1000 || dimensions.height >= 1000) {
                    bestFormat = format;
                    break;
                }
            }
        }

        if (bestFormat == nil) {
            assert(false);
            return nil;
        }

        AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) {
            if (lhs.maxFrameRate < rhs.maxFrameRate) {
                return NSOrderedAscending;
            } else {
                return NSOrderedDescending;
            }
        }] lastObject];

        if (frameRateRange == nil) {
            assert(false);
            return nil;
        }

        [_videoCapturer startCaptureWithDevice:selectedCamera format:bestFormat fps:30];
        [_videoCapturer startCaptureWithDevice:sourceDescription.device format:sourceDescription.format fps:30];
    }
    return self;
}

@ -154,12 +204,23 @@

namespace tgcalls {

VideoCapturerInterfaceImpl::VideoCapturerInterfaceImpl(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated) :
VideoCapturerInterfaceImpl::VideoCapturerInterfaceImpl(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::pair<int, int> &outResolution) :
_source(source) {
    VideoCapturerInterfaceImplSourceDescription *sourceDescription = [VideoCapturerInterfaceImplReference selectCapturerDescription:useFrontCamera];

    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(sourceDescription.format.formatDescription);
#ifdef WEBRTC_IOS
    outResolution.first = dimensions.height;
    outResolution.second = dimensions.width;
#else
    outResolution.first = dimensions.width;
    outResolution.second = dimensions.height;
#endif

    _implReference = [[VideoCapturerInterfaceImplHolder alloc] init];
    VideoCapturerInterfaceImplHolder *implReference = _implReference;
    dispatch_async(dispatch_get_main_queue(), ^{
        VideoCapturerInterfaceImplReference *value = [[VideoCapturerInterfaceImplReference alloc] initWithSource:source useFrontCamera:useFrontCamera isActiveUpdated:^(bool isActive) {
        VideoCapturerInterfaceImplReference *value = [[VideoCapturerInterfaceImplReference alloc] initWithSource:source sourceDescription:sourceDescription isActiveUpdated:^(bool isActive) {
            stateUpdated(isActive ? VideoState::Active : VideoState::Paused);
        }];
        if (value != nil) {

@ -29,7 +29,7 @@
- (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived;
- (void)internalSetOnOrientationUpdated:(void (^ _Nullable)(int))onOrientationUpdated;
- (void)internalSetOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated;

- (void)setIsForceMirrored:(BOOL)forceMirrored;
@end

#endif // WEBRTC_MAC

@ -19,13 +19,9 @@
#import "api/media_stream_interface.h"

#import "RTCMTLI420Renderer.h"
#import "RTCMTLNV12Renderer.h"
#import "RTCMTLRGBRenderer.h"

#define MTKViewClass NSClassFromString(@"MTKView")
#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")

namespace {

@ -67,7 +63,7 @@ private:

@interface VideoMetalView () <MTKViewDelegate> {
    RTCMTLI420Renderer *_rendererI420;
    RTCMTLNV12Renderer *_rendererNV12;

    MTKView *_metalView;
    RTCVideoFrame *_videoFrame;
    CGSize _videoFrameSize;

@ -83,7 +79,7 @@ private:

    bool _didSetShouldBeMirrored;
    bool _shouldBeMirrored;

    bool _forceMirrored;
}

@end

@ -91,7 +87,7 @@ private:
@implementation VideoMetalView

+ (bool)isSupported {
    return YES;
    return [VideoMetalView isMetalAvailable];
}

- (instancetype)initWithFrame:(CGRect)frameRect {

@ -164,10 +160,6 @@ private:
    return [[MTKViewClass alloc] initWithFrame:frame];
}

+ (RTCMTLNV12Renderer *)createNV12Renderer {
    return [[RTCMTLNV12RendererClass alloc] init];
}

+ (RTCMTLI420Renderer *)createI420Renderer {
    return [[RTCMTLI420RendererClass alloc] init];
}

@ -192,7 +184,7 @@ private:
- (void)layout {
    [super layout];

    if (_shouldBeMirrored) {
    if (_shouldBeMirrored || _forceMirrored) {
        _metalView.layer.anchorPoint = NSMakePoint(1, 0);
        _metalView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1);
        // _metalView.layer.transform = CATransform3DMakeScale(-1, 1, 1);

@ -232,20 +224,19 @@ private:

    if ([buffer isKindOfClass:[TGRTCCVPixelBuffer class]]) {
        bool shouldBeMirrored = ((TGRTCCVPixelBuffer *)buffer).shouldBeMirrored;

        if (shouldBeMirrored) {
            _metalView.layer.anchorPoint = NSMakePoint(1, 0);
            _metalView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1);
            // _metalView.layer.transform = CATransform3DMakeScale(-1, 1, 1);
        } else {
            _metalView.layer.anchorPoint = NSMakePoint(0, 0);
            _metalView.layer.affineTransform = CGAffineTransformIdentity;
            //_metalView.layer.transform = CATransform3DIdentity;
        }

        if (shouldBeMirrored != _shouldBeMirrored) {
            _shouldBeMirrored = shouldBeMirrored;
        bool shouldBeMirrored = ((TGRTCCVPixelBuffer *)buffer).shouldBeMirrored;

        if (shouldBeMirrored || _forceMirrored) {
            _metalView.layer.anchorPoint = NSMakePoint(1, 0);
            _metalView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1);
            // _metalView.layer.transform = CATransform3DMakeScale(-1, 1, 1);
        } else {
            _metalView.layer.anchorPoint = NSMakePoint(0, 0);
            _metalView.layer.affineTransform = CGAffineTransformIdentity;
            //_metalView.layer.transform = CATransform3DIdentity;
        }

        if (_didSetShouldBeMirrored) {
            if (_onIsMirroredUpdated) {

@ -256,31 +247,25 @@ private:
            }
        }
    }

    if (!_rendererI420) {
        _rendererI420 = [VideoMetalView createI420Renderer];
        if (![_rendererI420 addRenderingDestination:_metalView]) {
            _rendererI420 = nil;
            RTCLogError(@"Failed to create I420 renderer");
            return;
        }
    }
    if (!_rendererI420) {
        _rendererI420 = [VideoMetalView createI420Renderer];
        if (![_rendererI420 addRenderingDestination:_metalView]) {
            _rendererI420 = nil;
            RTCLogError(@"Failed to create I420 renderer");
            return;
        }
        renderer = _rendererI420;
    }

    if (!_firstFrameReceivedReported && _onFirstFrameReceived) {
        _firstFrameReceivedReported = true;
        _onFirstFrameReceived((float)videoFrame.width / (float)videoFrame.height);
    }

    renderer = _rendererI420;

    renderer.rotationOverride = _rotationOverride;
    [renderer drawFrame:videoFrame];
    _lastFrameTimeNs = videoFrame.timeStampNs;

    if (!_firstFrameReceivedReported && _onFirstFrameReceived) {
        _firstFrameReceivedReported = true;
        _onFirstFrameReceived((float)videoFrame.width / (float)videoFrame.height);
    }
}

- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {

@ -346,11 +331,15 @@ private:
        return;
    }

    if (frame == nil) {
        RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
        return;
    }
    _videoFrame = frame;

}

- (std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)getSink {

@ -379,4 +368,10 @@ private:
    _onIsMirroredUpdated = [onIsMirroredUpdated copy];
}

- (void)setIsForceMirrored:(BOOL)forceMirrored {
    _forceMirrored = forceMirrored;
    [self setNeedsLayout:YES];
}

@end

@ -1,4 +1,4 @@
#include "WindowsInterface.h"
#include "DesktopInterface.h"

#include "platform/tdesktop/VideoCapturerInterfaceImpl.h"
#include "platform/tdesktop/VideoCapturerTrackSource.h"

@ -9,30 +9,32 @@

namespace tgcalls {

std::unique_ptr<webrtc::VideoEncoderFactory> WindowsInterface::makeVideoEncoderFactory() {
std::unique_ptr<webrtc::VideoEncoderFactory> DesktopInterface::makeVideoEncoderFactory() {
    return webrtc::CreateBuiltinVideoEncoderFactory();
}

std::unique_ptr<webrtc::VideoDecoderFactory> WindowsInterface::makeVideoDecoderFactory() {
std::unique_ptr<webrtc::VideoDecoderFactory> DesktopInterface::makeVideoDecoderFactory() {
    return webrtc::CreateBuiltinVideoDecoderFactory();
}

rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> WindowsInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) {
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> DesktopInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) {
    const auto videoTrackSource = VideoCapturerTrackSource::Create();
    return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, videoTrackSource);
    return videoTrackSource
        ? webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, videoTrackSource)
        : nullptr;
}

bool WindowsInterface::supportsEncoding(const std::string &codecName) {
bool DesktopInterface::supportsEncoding(const std::string &codecName) {
    return (codecName == cricket::kH264CodecName)
        || (codecName == cricket::kVp8CodecName);
}

std::unique_ptr<VideoCapturerInterface> WindowsInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) {
std::unique_ptr<VideoCapturerInterface> DesktopInterface::makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext) {
    return std::make_unique<VideoCapturerInterfaceImpl>(source, useFrontCamera, stateUpdated);
}

std::unique_ptr<PlatformInterface> CreatePlatformInterface() {
    return std::make_unique<WindowsInterface>();
    return std::make_unique<DesktopInterface>();
}

} // namespace tgcalls

@ -1,12 +1,12 @@
#ifndef TGCALLS_WINDOWS_INTERFACE_H
#define TGCALLS_WINDOWS_INTERFACE_H
#ifndef TGCALLS_DESKTOP_INTERFACE_H
#define TGCALLS_DESKTOP_INTERFACE_H

#include "platform/PlatformInterface.h"
#include "VideoCapturerInterface.h"

namespace tgcalls {

class WindowsInterface : public PlatformInterface {
class DesktopInterface : public PlatformInterface {
public:
    std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory() override;
    std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory() override;

@ -18,4 +18,4 @@ public:

} // namespace tgcalls

#endif
#endif // TGCALLS_DESKTOP_INTERFACE_H

@ -325,7 +325,6 @@ public:
    dependencies.event_log_factory =
        std::make_unique<webrtc::RtcEventLogFactory>(dependencies.task_queue_factory.get());
    dependencies.network_controller_factory = nullptr;
    //dependencies.media_transport_factory = nullptr;

    _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies));

@ -1002,8 +1001,10 @@ int InstanceImplReference::GetConnectionMaxLayer() {
    return 92;
}

std::string InstanceImplReference::GetVersion() {
    return "2.8.8";
std::vector<std::string> InstanceImplReference::GetVersions() {
    std::vector<std::string> result;
    result.push_back("2.8.8");
    return result;
}

std::string InstanceImplReference::getLastError() {

@ -1027,15 +1028,15 @@ PersistentState InstanceImplReference::getPersistentState() {
    return PersistentState();
}

FinalState InstanceImplReference::stop() {
    auto result = FinalState();

    result.persistentState = getPersistentState();
    result.debugLog = logSink_->result();
    result.trafficStats = getTrafficStats();
    result.isRatingSuggested = false;

    return result;
void InstanceImplReference::stop(std::function<void(FinalState)> completion) {
    auto result = FinalState();

    result.persistentState = getPersistentState();
    result.debugLog = logSink_->result();
    result.trafficStats = getTrafficStats();
    result.isRatingSuggested = false;

    completion(result);
}

template <>

@@ -28,13 +28,13 @@ public:
 	void setAudioOutputDuckingEnabled(bool enabled) override;
 	void setIsLowBatteryLevel(bool isLowBatteryLevel) override;
 	static int GetConnectionMaxLayer();
-	static std::string GetVersion();
+	static std::vector<std::string> GetVersions();
 	std::string getLastError() override;
 	std::string getDebugInfo() override;
 	int64_t getPreferredRelayId() override;
 	TrafficStats getTrafficStats() override;
 	PersistentState getPersistentState() override;
-	FinalState stop() override;
+	void stop(std::function<void(FinalState)> completion) override;
 
 private:
 	std::unique_ptr<LogSinkImpl> logSink_;

@@ -19,7 +19,7 @@ public class BuildVars {
     public static boolean USE_CLOUD_STRINGS = true;
     public static boolean CHECK_UPDATES = true;
     public static boolean TON_WALLET_STANDALONE = false;
-    public static int BUILD_VERSION = 2061;
+    public static int BUILD_VERSION = 2064;
     public static String BUILD_VERSION_STRING = "7.0.0";
     public static int APP_ID = 4;
    public static String APP_HASH = "014b35b6184100b085b0d0572f9b5103";

@@ -288,6 +288,8 @@ public class MessagesController extends BaseController implements NotificationCe
     private SharedPreferences mainPreferences;
     private SharedPreferences emojiPreferences;
 
+    public volatile boolean ignoreSetOnline;
+
     public static class FaqSearchResult {
 
         public String title;
@@ -2064,6 +2066,7 @@ public class MessagesController extends BaseController implements NotificationCe
             suggestedFilters.clear();
             gettingAppChangelog = false;
             dialogFiltersLoaded = false;
+            ignoreSetOnline = false;
 
             Utilities.stageQueue.postRunnable(() -> {
                 readTasks.clear();
@@ -4343,7 +4346,7 @@ public class MessagesController extends BaseController implements NotificationCe
         checkReadTasks();
 
         if (getUserConfig().isClientActivated()) {
-            if (getConnectionsManager().getPauseTime() == 0 && ApplicationLoader.isScreenOn && !ApplicationLoader.mainInterfacePausedStageQueue) {
+            if (!ignoreSetOnline && getConnectionsManager().getPauseTime() == 0 && ApplicationLoader.isScreenOn && !ApplicationLoader.mainInterfacePausedStageQueue) {
                 if (ApplicationLoader.mainInterfacePausedStageQueueTime != 0 && Math.abs(ApplicationLoader.mainInterfacePausedStageQueueTime - System.currentTimeMillis()) > 1000) {
                 if (statusSettingState != 1 && (lastStatusUpdateTime == 0 || Math.abs(System.currentTimeMillis() - lastStatusUpdateTime) >= 55000 || offlineSent)) {
                     statusSettingState = 1;
@@ -11785,6 +11788,9 @@ public class MessagesController extends BaseController implements NotificationCe
             } else {
                 ApplicationLoader.applicationContext.startService(intent);
             }
+            if (ApplicationLoader.mainInterfacePaused || !ApplicationLoader.isScreenOn) {
+                ignoreSetOnline = true;
+            }
         } catch (Throwable e) {
             FileLog.e(e);
         }

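Note: the new ignoreSetOnline flag threads through MessagesController, VoIPBaseService, VoIPService and ChatActivity below; a call started while the app is backgrounded must not push the account online. A minimal stand-alone model of the pattern (editor's sketch, not code from the commit; names are illustrative):

public class OnlineStatusGate {
    // volatile: written from the VoIP service thread, read from the periodic update loop.
    public volatile boolean ignoreSetOnline;

    void updateTimerTick(boolean screenOn, boolean interfacePaused) {
        if (!ignoreSetOnline && screenOn && !interfacePaused) {
            System.out.println("send account.updateStatus(online)");
        }
    }

    public static void main(String[] args) {
        OnlineStatusGate gate = new OnlineStatusGate();
        gate.updateTimerTick(true, false); // would go online
        gate.ignoreSetOnline = true;       // background call started
        gate.updateTimerTick(true, false); // suppressed
    }
}

volatile is sufficient here because the flag is a single boolean with no compound read-modify-write across threads.
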
@@ -5,6 +5,8 @@ import org.telegram.messenger.BuildVars;
 import org.telegram.messenger.FileLog;
 import org.webrtc.VideoSink;
 
+import java.util.concurrent.CountDownLatch;
+
 public class NativeInstance {
 
     private Instance.OnStateUpdatedListener onStateUpdatedListener;
@@ -69,6 +71,27 @@ public class NativeInstance {
         }
     }
 
+
+    private Instance.FinalState finalState;
+    private CountDownLatch stopBarrier;
+    private void onStop(Instance.FinalState state) {
+        finalState = state;
+        if (stopBarrier != null) {
+            stopBarrier.countDown();
+        }
+    }
+
+    public Instance.FinalState stop() {
+        stopBarrier = new CountDownLatch(1);
+        stopNative();
+        try {
+            stopBarrier.await();
+        } catch (Exception e) {
+            FileLog.e(e);
+        }
+        return finalState;
+    }
+
     private static native long makeNativeInstance(String version, NativeInstance instance, Instance.Config config, String persistentStateFilePath, Instance.Endpoint[] endpoints, Instance.Proxy proxy, int networkType, Instance.EncryptionKey encryptionKey, VideoSink remoteSink, long videoCapturer, float aspectRatio);
     public static native long createVideoCapturer(VideoSink localSink);
     public static native void setVideoStateCapturer(long videoCapturer, int videoState);
@@ -87,7 +110,7 @@ public class NativeInstance {
     public native long getPreferredRelayId();
     public native Instance.TrafficStats getTrafficStats();
     public native byte[] getPersistentState();
-    public native Instance.FinalState stop();
+    private native void stopNative();
     public native void setupOutgoingVideo(VideoSink localSink);
     public native void switchCamera();
     public native void setVideoState(int videoState);

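Note: tgcalls now delivers the final call state asynchronously (stop(completion) on the C++ side), so the Java wrapper rebuilds the old synchronous stop() with a CountDownLatch: stopNative() kicks off the native shutdown, and the onStop callback stores the result and releases the latch. A stand-alone sketch of that barrier pattern (editor's sketch; the names are stand-ins, not the actual JNI surface):

import java.util.concurrent.CountDownLatch;

public class StopBarrierDemo {
    private volatile String finalState;
    private final CountDownLatch stopBarrier = new CountDownLatch(1);

    // Stand-in for the JNI onStop() callback invoked from a native thread.
    private void onStop(String state) {
        finalState = state;
        stopBarrier.countDown();
    }

    // Blocking stop(): start the asynchronous shutdown, then wait for the callback.
    public String stop() throws InterruptedException {
        new Thread(() -> onStop("final-state")).start(); // stand-in for stopNative()
        stopBarrier.await();
        return finalState;
    }

    public static void main(String[] args) throws InterruptedException {
        System.out.println(new StopBarrierDemo().stop());
    }
}
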
@@ -14,10 +14,7 @@ import org.webrtc.CameraEnumerator;
 import org.webrtc.CameraVideoCapturer;
 import org.webrtc.CapturerObserver;
 import org.webrtc.EglBase;
-import org.webrtc.NativeAndroidVideoTrackSource;
-import org.webrtc.NativeCapturerObserver;
 import org.webrtc.SurfaceTextureHelper;
 import org.webrtc.VideoSource;
 
 @TargetApi(18)
 public class VideoCameraCapturer {

@@ -62,6 +62,7 @@ import android.view.View;
 import android.widget.RemoteViews;
 
 import org.telegram.messenger.AndroidUtilities;
 import org.telegram.messenger.ApplicationLoader;
+import org.telegram.messenger.BuildConfig;
 import org.telegram.messenger.BuildVars;
 import org.telegram.messenger.ContactsController;
@@ -631,6 +632,9 @@ public abstract class VoIPBaseService extends Service implements SensorEventList
         }
         stopForeground(true);
         stopRinging();
+        if (ApplicationLoader.mainInterfacePaused || !ApplicationLoader.isScreenOn) {
+            MessagesController.getInstance(currentAccount).ignoreSetOnline = false;
+        }
         NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.appDidLogout);
         SensorManager sm = (SensorManager) getSystemService(SENSOR_SERVICE);
         Sensor proximity = sm.getDefaultSensor(Sensor.TYPE_PROXIMITY);
@@ -649,10 +653,11 @@ public abstract class VoIPBaseService extends Service implements SensorEventList
         sharedInstance = null;
         AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didEndCall));
         if (tgVoip != null) {
-            updateTrafficStats();
             StatsController.getInstance(currentAccount).incrementTotalCallsTime(getStatsNetworkType(), (int) (getCallDuration() / 1000) % 5);
             onTgVoipPreStop();
-            onTgVoipStop(tgVoip.stop());
+            Instance.FinalState state = tgVoip.stop();
+            updateTrafficStats(state.trafficStats);
+            onTgVoipStop(state);
             prevTrafficStats = null;
             callStartTime = 0;
             tgVoip = null;
@@ -802,8 +807,10 @@ public abstract class VoIPBaseService extends Service implements SensorEventList
         }
     }
 
-    protected void updateTrafficStats() {
-        final Instance.TrafficStats trafficStats = tgVoip.getTrafficStats();
+    protected void updateTrafficStats(Instance.TrafficStats trafficStats) {
+        if (trafficStats == null) {
+            trafficStats = tgVoip.getTrafficStats();
+        }
         final long wifiSentDiff = trafficStats.bytesSentWifi - (prevTrafficStats != null ? prevTrafficStats.bytesSentWifi : 0);
         final long wifiRecvdDiff = trafficStats.bytesReceivedWifi - (prevTrafficStats != null ? prevTrafficStats.bytesReceivedWifi : 0);
         final long mobileSentDiff = trafficStats.bytesSentMobile - (prevTrafficStats != null ? prevTrafficStats.bytesSentMobile : 0);
@@ -1596,7 +1603,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList
         }
     }
 
-    public class SharedUIParams {
+    public static class SharedUIParams {
         public boolean tapToVideoTooltipWasShowed;
         public boolean cameraAlertWasShowed;
         public boolean wasVideoCall;

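Note: updateTrafficStats() now accepts the stats snapshot captured by stop(), falling back to a live query when passed null; the method then commits only the delta against the previous snapshot. A stand-alone sketch of the delta bookkeeping (editor's sketch, wifi counters only; the mobile counters work the same way):

public class TrafficDelta {
    static class TrafficStats {
        long bytesSentWifi, bytesReceivedWifi;
        TrafficStats(long sent, long recv) { bytesSentWifi = sent; bytesReceivedWifi = recv; }
    }

    private static TrafficStats prev; // snapshot from the previous tick, may be null

    // Returns {sentDiff, recvDiff} for the interval since the last call.
    static long[] diff(TrafficStats current) {
        long sent = current.bytesSentWifi - (prev != null ? prev.bytesSentWifi : 0);
        long recv = current.bytesReceivedWifi - (prev != null ? prev.bytesReceivedWifi : 0);
        prev = current;
        return new long[] {sent, recv};
    }

    public static void main(String[] args) {
        System.out.println(diff(new TrafficStats(1000, 4000))[0]); // 1000
        System.out.println(diff(new TrafficStats(1500, 5000))[0]); // 500
    }
}
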
@@ -537,6 +537,7 @@ public class VoIPService extends VoIPBaseService {
     }
 
     public void acceptIncomingCall() {
+        MessagesController.getInstance(currentAccount).ignoreSetOnline = false;
         stopRinging();
         showNotification();
         configureDeviceForCall();
@@ -1141,7 +1142,7 @@
         @Override
         public void run() {
             if (tgVoip != null) {
-                updateTrafficStats();
+                updateTrafficStats(null);
                 AndroidUtilities.runOnUIThread(this, 5000);
             }
         }

@@ -151,6 +151,8 @@ public class ChartHeaderView extends FrameLayout {
             back.setAlpha(1f);
             back.setTranslationX(0);
             back.setTranslationY(0);
+            back.setScaleX(1f);
+            back.setScaleY(1f);
             title.setAlpha(0f);
         }
     }

@@ -8798,7 +8798,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not
         }
         updateChatListViewTopPadding();
 
-        if (!firstLoading && !paused && !inPreviewMode && !inScheduleMode) {
+        if (!firstLoading && !paused && !inPreviewMode && !inScheduleMode && !getMessagesController().ignoreSetOnline) {
             int scheduledRead = 0;
             if ((maxPositiveUnreadId != Integer.MIN_VALUE || maxNegativeUnreadId != Integer.MAX_VALUE)) {
                 int counterDecrement = 0;

@@ -27,8 +27,6 @@ import androidx.annotation.NonNull;
 import androidx.core.content.ContextCompat;
 import androidx.core.graphics.ColorUtils;
 
-import com.google.android.exoplayer2.util.Log;
-
 import org.telegram.messenger.AndroidUtilities;
 import org.telegram.messenger.R;
 import org.telegram.ui.Components.CubicBezierInterpolator;
@@ -201,7 +201,8 @@ public class VoIPPiPView implements VoIPBaseService.StateListener {
         windowLayoutParams.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE |
                 WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED |
                 WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS |
-                WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN;
+                WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN |
+                WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON;
 
         return windowLayoutParams;
     }

@@ -12,6 +12,7 @@ import android.graphics.PorterDuff;
 import android.graphics.PorterDuffXfermode;
 import android.graphics.RectF;
 import android.os.Build;
+import android.view.Gravity;
 import android.view.View;
 import android.view.ViewOutlineProvider;
 import android.widget.FrameLayout;
@@ -19,11 +20,10 @@ import android.widget.ImageView;
 
 import androidx.annotation.NonNull;
 
-import com.google.android.exoplayer2.util.Log;
-
 import org.telegram.messenger.ApplicationLoader;
 import org.telegram.messenger.Utilities;
 import org.telegram.ui.Components.BackupImageView;
 import org.telegram.ui.Components.LayoutHelper;
 import org.webrtc.RendererCommon;
 import org.webrtc.TextureViewRenderer;
 
 import java.io.File;
@@ -41,6 +41,7 @@ public class VoIPTextureView extends FrameLayout {
 
     public final TextureViewRenderer renderer;
     public final ImageView imageView;
+    public View backgroundView;
 
     public Bitmap cameraLastBitmap;
     public float stubVisibleProgress = 1f;
@@ -55,10 +56,23 @@
                 super.onFirstFrameRendered();
                 VoIPTextureView.this.invalidate();
             }
+
+            @Override
+            protected void onMeasure(int widthSpec, int heightSpec) {
+                super.onMeasure(widthSpec, heightSpec);
+            }
         };
         renderer.setEnableHardwareScaler(true);
-
-        addView(renderer);
+        renderer.setIsCamera(isCamera);
+        if (!isCamera) {
+            backgroundView = new View(context);
+            backgroundView.setBackgroundColor(0xff1b1f23);
+            addView(backgroundView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT));
+            renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
+            addView(renderer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER));
+        } else {
+            addView(renderer);
+        }
         addView(imageView);
 
         if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {

@@ -3,13 +3,11 @@ package org.telegram.ui.Components.voip;
 
 import android.animation.Animator;
 import android.animation.AnimatorListenerAdapter;
 import android.app.Activity;
 import android.app.KeyguardManager;
 import android.content.Context;
 import android.content.pm.ActivityInfo;
 import android.graphics.PixelFormat;
 import android.os.Build;
 import android.view.Gravity;
 import android.view.KeyEvent;
 import android.view.MotionEvent;
 import android.view.VelocityTracker;
 import android.view.View;
@@ -134,7 +132,7 @@
             try {
                 WindowManager wm = (WindowManager) activity.getSystemService(Context.WINDOW_SERVICE);
                 wm.removeView(VoIPWindowView.this);
-            } catch (Exception e) {
+            } catch (Exception ignore) {
 
             }
         } else {
@@ -150,7 +148,7 @@
             setVisibility(View.GONE);
             try {
                 wm.removeView(VoIPWindowView.this);
-            } catch (Exception e) {
+            } catch (Exception ignore) {
 
             }
         }

File diff suppressed because it is too large
@@ -1433,6 +1433,9 @@ public class StatisticActivity extends BaseFragment implements NotificationCente
                 params.date = d;
 
                 int dateIndex = Arrays.binarySearch(data.chartData.x, d);
+                if (dateIndex < 0) {
+                    dateIndex = data.chartData.x.length - 1;
+                }
                 params.xPercentage = data.chartData.xPercentage[dateIndex];

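Note: Arrays.binarySearch returns -(insertionPoint) - 1 when the key is absent, so the new guard clamps a missing date to the last chart column instead of indexing with a negative value. A worked example (editor's sketch with made-up values):

import java.util.Arrays;

public class BinarySearchGuard {
    public static void main(String[] args) {
        long[] x = {100, 200, 300};          // sorted chart dates
        int i = Arrays.binarySearch(x, 250); // 250 absent -> -(2) - 1 = -3
        if (i < 0) {
            i = x.length - 1;                // fall back to the newest column
        }
        System.out.println(i);               // 2
    }
}
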
@@ -571,8 +571,13 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
         callingUserMiniFloatingLayout.setFloatingMode(true, false);
         callingUserMiniTextureRenderer = new TextureViewRenderer(context);
         callingUserMiniTextureRenderer.setEnableHardwareScaler(true);
+        callingUserMiniTextureRenderer.setIsCamera(false);
+        callingUserMiniTextureRenderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
 
-        callingUserMiniFloatingLayout.addView(callingUserMiniTextureRenderer);
+        View backgroundView = new View(context);
+        backgroundView.setBackgroundColor(0xff1b1f23);
+        callingUserMiniFloatingLayout.addView(backgroundView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT));
+        callingUserMiniFloatingLayout.addView(callingUserMiniTextureRenderer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER));
         callingUserMiniFloatingLayout.setOnTapListener(view -> {
             if (cameraForceExpanded && System.currentTimeMillis() - lastContentTapTime > 500) {
                 AndroidUtilities.cancelRunOnUIThread(hideUIRunnable);
@@ -622,7 +627,7 @@
         });
 
         emojiRationalTextView = new TextView(context);
-        emojiRationalTextView.setText(LocaleController.formatString("CallEmojiKeyTooltip", R.string.CallEmojiKeyTooltip, callingUser.first_name));
+        emojiRationalTextView.setText(LocaleController.formatString("CallEmojiKeyTooltip", R.string.CallEmojiKeyTooltip, UserObject.getFirstName(callingUser)));
         emojiRationalTextView.setTextSize(16);
         emojiRationalTextView.setTextColor(Color.WHITE);
         emojiRationalTextView.setGravity(Gravity.CENTER);

@@ -11,6 +11,7 @@
 package org.webrtc;
 
 import android.content.Context;
+import android.hardware.Camera;
 import android.os.Handler;
 import android.os.SystemClock;
 import java.io.IOException;
@@ -45,6 +46,8 @@ class Camera1Session implements CameraSession {
   // Used only for stats. Only used on the camera thread.
   private final long constructionTimeNs; // Construction time of this class.
 
+  private OrientationHelper orientationHelper;
+
   private SessionState state;
   private boolean firstFrameReported;
 
@@ -170,6 +173,7 @@
     this.info = info;
     this.captureFormat = captureFormat;
     this.constructionTimeNs = constructionTimeNs;
+    this.orientationHelper = new OrientationHelper();
 
     surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
 
@@ -218,6 +222,7 @@
     } else {
       listenForBytebufferFrames();
     }
+    orientationHelper.start();
     try {
       camera.startPreview();
     } catch (RuntimeException e) {
@@ -242,6 +247,9 @@
     camera.stopPreview();
     camera.release();
     events.onCameraClosed(this);
+    if (orientationHelper != null) {
+      orientationHelper.stop();
+    }
     Logging.d(TAG, "Stop done");
   }
 
@@ -313,10 +321,11 @@
   }
 
   private int getFrameOrientation() {
-    int rotation = CameraSession.getDeviceOrientation(applicationContext);
-    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
+    int rotation = orientationHelper.getOrientation();
+    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
       rotation = 360 - rotation;
     }
+    OrientationHelper.cameraRotation = rotation;
     return (info.orientation + rotation) % 360;
   }

@@ -24,9 +24,11 @@ import android.os.Handler;
 import androidx.annotation.Nullable;
 import android.util.Range;
 import android.view.Surface;
+
+import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
 import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
 
 @TargetApi(21)
@@ -53,6 +55,8 @@
   private final int height;
   private final int framerate;
 
+  private OrientationHelper orientationHelper;
+
   // Initialized at start
   private CameraCharacteristics cameraCharacteristics;
   private int cameraOrientation;
@@ -292,6 +296,7 @@
     this.width = width;
     this.height = height;
     this.framerate = framerate;
+    this.orientationHelper = new OrientationHelper();
 
     start();
   }
@@ -306,6 +311,7 @@
       reportError("getCameraCharacteristics(): " + e.getMessage());
       return;
     }
+    orientationHelper.start();
     cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
     isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
         == CameraMetadata.LENS_FACING_FRONT;
@@ -386,6 +392,9 @@
       cameraDevice.close();
       cameraDevice = null;
     }
+    if (orientationHelper != null) {
+      orientationHelper.stop();
+    }
 
     Logging.d(TAG, "Stop done");
   }
@@ -405,10 +414,11 @@
   }
 
   private int getFrameOrientation() {
-    int rotation = CameraSession.getDeviceOrientation(applicationContext);
-    if (!isCameraFrontFacing) {
+    int rotation = orientationHelper.getOrientation();
+    if (isCameraFrontFacing) {
      rotation = 360 - rotation;
    }
+    OrientationHelper.cameraRotation = rotation;
     return (cameraOrientation + rotation) % 360;
   }

@@ -10,10 +10,7 @@
 
 package org.webrtc;
 
-import android.content.Context;
 import android.graphics.Matrix;
-import android.view.WindowManager;
-import android.view.Surface;
 
 interface CameraSession {
   enum FailureType { ERROR, DISCONNECTED }
@@ -39,21 +36,6 @@ interface CameraSession {
    */
   void stop();
 
-  static int getDeviceOrientation(Context context) {
-    final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
-    switch (wm.getDefaultDisplay().getRotation()) {
-      case Surface.ROTATION_90:
-        return 90;
-      case Surface.ROTATION_180:
-        return 180;
-      case Surface.ROTATION_270:
-        return 270;
-      case Surface.ROTATION_0:
-      default:
-        return 0;
-    }
-  }
-
   static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix(
       TextureBufferImpl buffer, boolean mirror, int rotation) {
     final Matrix transformMatrix = new Matrix();

@@ -150,6 +150,8 @@ public class EglRenderer implements VideoSink {
   // If true, mirrors the video stream vertically.
   private boolean mirrorVertically;
 
+  private int rotation;
+
   // These variables are synchronized on |statisticsLock|.
   private final Object statisticsLock = new Object();
   // Total number of video frames received in renderFrame() call.
@@ -532,6 +534,12 @@
     }
   }
 
+  public void setRotation(int value) {
+    synchronized (layoutLock) {
+      rotation = value;
+    }
+  }
+
   /**
    * Release EGL surface. This function will block until the EGL surface is released.
    */
@@ -637,7 +645,8 @@
 
     final long startTimeNs = System.nanoTime();
 
-    final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
+    boolean rotate = Math.abs(rotation) == 90 || Math.abs(rotation) == 270;
+    final float frameAspectRatio = (rotate ? frame.getRotatedHeight() : frame.getRotatedWidth()) / (float) (rotate ? frame.getRotatedWidth() : frame.getRotatedHeight());
     final float drawnAspectRatio;
     synchronized (layoutLock) {
       drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
@@ -656,6 +665,7 @@
 
     drawMatrix.reset();
     drawMatrix.preTranslate(0.5f, 0.5f);
+    drawMatrix.preRotate(rotation);
     drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
     drawMatrix.preScale(scaleX, scaleY);
     drawMatrix.preTranslate(-0.5f, -0.5f);
@@ -665,7 +675,7 @@
     GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
     frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
-        eglBase.surfaceWidth(), eglBase.surfaceHeight());
+        eglBase.surfaceWidth(), eglBase.surfaceHeight(), rotate);
 
     final long swapBuffersStartTimeNs = System.nanoTime();
     if (usePresentationTimeStamp) {
@@ -715,6 +725,7 @@
 
       drawMatrix.reset();
       drawMatrix.preTranslate(0.5f, 0.5f);
+      drawMatrix.preRotate(rotation);
       drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f);
       drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
       drawMatrix.preTranslate(-0.5f, -0.5f);
@@ -744,7 +755,7 @@
       GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
       GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
       frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
-          0 /* viewportY */, scaledWidth, scaledHeight);
+          0 /* viewportY */, scaledWidth, scaledHeight, false);
 
       final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
       GLES20.glViewport(0, 0, scaledWidth, scaledHeight);

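Note: when the renderer is told to counter-rotate by a quarter turn, the frame's effective width and height swap, so its aspect ratio must be inverted before layout and cropping; drawMatrix.preRotate applies the rotation around the texture center (that is what the preTranslate(0.5f)/preTranslate(-0.5f) pair is for). The aspect computation, isolated (editor's sketch mirroring the expression above):

public class RotatedAspect {
    // Same frameAspectRatio rule as above, for a given counter-rotation in degrees.
    static float frameAspect(int rotatedWidth, int rotatedHeight, int rotation) {
        boolean rotate = Math.abs(rotation) == 90 || Math.abs(rotation) == 270;
        return (rotate ? rotatedHeight : rotatedWidth)
                / (float) (rotate ? rotatedWidth : rotatedHeight);
    }

    public static void main(String[] args) {
        System.out.println(frameAspect(1280, 720, 0));   // 1.777... (landscape)
        System.out.println(frameAspect(1280, 720, -90)); // 0.5625 (appears portrait)
    }
}
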
@@ -35,43 +35,66 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
   private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000;
   private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000;
 
   // List of devices with poor H.264 encoder quality.
   // HW H.264 encoder on below devices has poor bitrate control - actual
   // bitrates deviates a lot from the target value.
-  private static final List<String> H264_HW_EXCEPTION_MODELS =
-      Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4", "Pixel 3 XL", "Pixel 3");
-
-  private static final List<String> VP8_HW_EXCEPTION_MODELS =
-      Arrays.asList("Pixel 3 XL", "Pixel 3");
-
   @Nullable private final EglBase14.Context sharedContext;
   private final boolean enableIntelVp8Encoder;
   private final boolean enableH264HighProfile;
   @Nullable private final Predicate<MediaCodecInfo> codecAllowedPredicate;
 
+  private static final List<String> H264_HW_EXCEPTION_MODELS =
+      Arrays.asList("samsung-sgh-i337", "nexus7", "nexus4", "pixel3xl", "pixel3");
+
+  private static final List<String> VP8_HW_EXCEPTION_MODELS =
+      Arrays.asList("pixel3xl", "pixel3");
+
   private static Set<String> HW_EXCEPTION_MODELS = new HashSet<String>() {{
-    add("SM-A310F");
-    add("SM-A310F/DS");
-    add("SM-A310Y");
-    add("SM-A310M");
-    add("SM-G920F");
-    add("SM-G920FD");
-    add("SM-G920FQ");
-    add("SM-G920I");
-    add("SM-G920A");
-    add("SM-G920T");
-    add("SM-G930F");
-    add("SM-G930FD");
-    add("SM-G930W8");
-    add("SM-G930S");
-    add("SM-G930K");
-    add("SM-G930L");
-    add("SM-G935F");
-    add("SM-G935FD");
-    add("SM-G935W8");
-    add("SM-G935S");
-    add("SM-G935K");
-    add("SM-G935L");
+    add("sm-a310f");
+    add("sm-a310f/ds");
+    add("sm-a310y");
+    add("sm-a310m");
+    add("sm-g920f");
+    add("sm-g920fd");
+    add("sm-g920fq");
+    add("sm-g920i");
+    add("sm-g920a");
+    add("sm-g920t");
+    add("sm-g930f");
+    add("sm-g930fd");
+    add("sm-g930w8");
+    add("sm-g930s");
+    add("sm-g930k");
+    add("sm-g930l");
+    add("sm-g935f");
+    add("sm-g935fd");
+    add("sm-g935w8");
+    add("sm-g935s");
+    add("sm-g935k");
+    add("sm-g935l");
+
+    add("i537");
+    add("sgh-i537");
+    add("gt-i9295");
+    add("sgh-i337");
+    add("gt-i9505g");
+    add("gt-i9505");
+    add("gt-i9515");
+    add("f240");
+    add("e980");
+    add("ls980");
+    add("e988");
+    add("e986");
+    add("f240l");
+    add("f240s");
+    add("v9815");
+    add("nx403a");
+    add("f310l");
+    add("f310lr");
+    add("onem7");
+    add("onemax");
+    add("pn071");
+    add("htc6500lvw");
+    add("butterflys");
+    add("mi2s");
+    add("n1");
   }};
 
   /**
@@ -226,8 +249,13 @@
 
   // Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the
   // current SDK.
+  private static String getModel() {
+    return Build.MODEL != null ? Build.MODEL.toLowerCase().replace(" ", "") : "nomodel";
+  }
+
   private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type) {
-    if (HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
+    if (HW_EXCEPTION_MODELS.contains(getModel())) {
       return false;
     }
     switch (type) {
@@ -244,7 +272,7 @@
   }
 
   private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
-    if (VP8_HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
+    if (VP8_HW_EXCEPTION_MODELS.contains(getModel())) {
       return false;
     }
     String name = info.getName();
@@ -268,7 +296,7 @@
 
   private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) {
     // First, H264 hardware might perform poorly on this model.
-    if (H264_HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
+    if (H264_HW_EXCEPTION_MODELS.contains(getModel())) {
       return false;
     }
     String name = info.getName();

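Note: the exception-model lookup was brittle against casing and spacing variants of Build.MODEL ("Pixel 3 XL" vs "pixel3xl"), so the tables are now lowercase with spaces stripped and getModel() normalizes the runtime value the same way. The normalization in isolation (editor's sketch; the set contents mirror the list above):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class ModelNormalization {
    static final Set<String> H264_HW_EXCEPTION_MODELS =
            new HashSet<>(Arrays.asList("samsung-sgh-i337", "nexus7", "nexus4", "pixel3xl", "pixel3"));

    // Same rule as the new getModel(): lowercase, spaces removed, null-safe.
    static String normalize(String buildModel) {
        return buildModel != null ? buildModel.toLowerCase().replace(" ", "") : "nomodel";
    }

    public static void main(String[] args) {
        System.out.println(H264_HW_EXCEPTION_MODELS.contains(normalize("Pixel 3 XL"))); // true
        System.out.println(H264_HW_EXCEPTION_MODELS.contains("Pixel 3 XL"));            // false: raw value misses
    }
}
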
@@ -0,0 +1,68 @@
+package org.webrtc;
+
+import android.view.OrientationEventListener;
+
+import org.telegram.messenger.ApplicationLoader;
+
+public class OrientationHelper {
+
+    private static final int ORIENTATION_HYSTERESIS = 5;
+    private OrientationEventListener orientationEventListener;
+    private int rotation;
+
+    public static volatile int cameraRotation;
+
+    private int roundOrientation(int orientation, int orientationHistory) {
+        boolean changeOrientation;
+        if (orientationHistory == OrientationEventListener.ORIENTATION_UNKNOWN) {
+            changeOrientation = true;
+        } else {
+            int dist = Math.abs(orientation - orientationHistory);
+            dist = Math.min(dist, 360 - dist);
+            changeOrientation = (dist >= 45 + ORIENTATION_HYSTERESIS);
+        }
+        if (changeOrientation) {
+            return ((orientation + 45) / 90 * 90) % 360;
+        }
+        return orientationHistory;
+    }
+
+    public OrientationHelper() {
+        orientationEventListener = new OrientationEventListener(ApplicationLoader.applicationContext) {
+            @Override
+            public void onOrientationChanged(int orientation) {
+                if (orientationEventListener == null || orientation == ORIENTATION_UNKNOWN) {
+                    return;
+                }
+                int newOrietation = roundOrientation(orientation, rotation);
+                if (newOrietation != rotation) {
+                    onOrientationUpdate(rotation = newOrietation);
+                }
+            }
+        };
+    }
+
+    protected void onOrientationUpdate(int orientation) {
+
+    }
+
+    public void start() {
+        if (orientationEventListener.canDetectOrientation()) {
+            orientationEventListener.enable();
+        } else {
+            orientationEventListener.disable();
+            orientationEventListener = null;
+        }
+    }
+
+    public void stop() {
+        if (orientationEventListener != null) {
+            orientationEventListener.disable();
+            orientationEventListener = null;
+        }
+    }
+
+    public int getOrientation() {
+        return rotation;
+    }
+}

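Note: roundOrientation in the new file snaps the raw sensor angle to the nearest multiple of 90 degrees, but only once the reading has moved at least 45 + 5 degrees away from the current bucket, which prevents flapping right at bucket edges. A worked example of the same rule (editor's sketch):

public class OrientationHysteresis {
    static final int ORIENTATION_HYSTERESIS = 5;
    static final int ORIENTATION_UNKNOWN = -1;

    // Same rounding rule as OrientationHelper.roundOrientation above.
    static int round(int orientation, int history) {
        boolean change;
        if (history == ORIENTATION_UNKNOWN) {
            change = true;
        } else {
            int dist = Math.abs(orientation - history);
            dist = Math.min(dist, 360 - dist);   // wrap-around distance on the circle
            change = dist >= 45 + ORIENTATION_HYSTERESIS;
        }
        return change ? ((orientation + 45) / 90 * 90) % 360 : history;
    }

    public static void main(String[] args) {
        int rotation = round(88, ORIENTATION_UNKNOWN); // first reading snaps: 90
        System.out.println(rotation);
        System.out.println(round(46, rotation));       // 44 degrees from 90: stays 90
        System.out.println(round(20, rotation));       // 70 degrees away: snaps to 0
    }
}
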
@@ -88,7 +88,7 @@ public class RendererCommon {
       this.visibleFractionMismatchOrientation = visibleFractionMismatchOrientation;
     }
 
-    public Point measure(int widthSpec, int heightSpec, int frameWidth, int frameHeight) {
+    public Point measure(boolean isCamera, int widthSpec, int heightSpec, int frameWidth, int frameHeight) {
       // Calculate max allowed layout size.
       final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec);
       final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec);
@@ -108,7 +108,7 @@
       if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) {
         layoutSize.x = maxWidth;
       }
-      if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY) {
+      if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY || !isCamera && (frameAspect > 1.0f) == (displayAspect > 1.0f)) {
         layoutSize.y = maxHeight;
       }
       return layoutSize;

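Note: the added clause lets a non-camera (remote) view take the full measured height whenever the frame and the display agree on landscape vs portrait; comparing `aspect > 1` on both sides is a compact orientation test. In isolation (editor's sketch):

public class OrientationMatch {
    // True when frame and display are both landscape or both portrait.
    static boolean sameOrientation(float frameAspect, float displayAspect) {
        return (frameAspect > 1.0f) == (displayAspect > 1.0f);
    }

    public static void main(String[] args) {
        System.out.println(sameOrientation(16 / 9f, 9 / 16f)); // false: landscape frame, portrait view
        System.out.println(sameOrientation(9 / 16f, 9 / 16f)); // true: both portrait
    }
}
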
@@ -190,7 +190,7 @@ public class SurfaceViewRenderer extends SurfaceView
   protected void onMeasure(int widthSpec, int heightSpec) {
     ThreadUtils.checkIsOnMainThread();
     Point size =
-        videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
+        videoLayoutMeasure.measure(true, widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
     setMeasuredDimension(size.x, size.y);
     logD("onMeasure(). New size: " + size.x + "x" + size.y);
   }

@@ -6,6 +6,7 @@ import android.graphics.Point;
 import android.graphics.SurfaceTexture;
 import android.os.Looper;
 import android.view.TextureView;
+import android.view.View;
 
 import org.telegram.messenger.AndroidUtilities;
 
@@ -31,6 +32,9 @@ public class TextureViewRenderer extends TextureView
   private boolean enableFixedSize;
   private int surfaceWidth;
   private int surfaceHeight;
+  private boolean isCamera;
+
+  private OrientationHelper orientationHelper;
 
   public static class TextureEglRenderer extends EglRenderer implements TextureView.SurfaceTextureListener {
     private static final String TAG = "TextureEglRenderer";
@@ -218,6 +222,9 @@
    */
   public void release() {
     eglRenderer.release();
+    if (orientationHelper != null) {
+      orientationHelper.stop();
+    }
   }
 
   /**
@@ -251,6 +258,19 @@
     eglRenderer.removeFrameListener(listener);
   }
 
+  public void setIsCamera(boolean value) {
+    isCamera = value;
+    if (!isCamera) {
+      orientationHelper = new OrientationHelper() {
+        @Override
+        protected void onOrientationUpdate(int orientation) {
+          updateRotation();
+        }
+      };
+      orientationHelper.start();
+    }
+  }
+
   /**
    * Enables fixed size for the surface. This provides better performance but might be buggy on some
    * devices. By default this is turned off.
@@ -261,6 +281,45 @@
     updateSurfaceSize();
   }
 
+  private void updateRotation() {
+    if (orientationHelper == null || rotatedFrameWidth == 0 || rotatedFrameHeight == 0) {
+      return;
+    }
+    View parentView = (View) getParent();
+    if (parentView == null) {
+      return;
+    }
+    int orientation = orientationHelper.getOrientation();
+    float viewWidth = getMeasuredWidth();
+    float viewHeight = getMeasuredHeight();
+    float w;
+    float h;
+    float targetWidth = parentView.getMeasuredWidth();
+    float targetHeight = parentView.getMeasuredHeight();
+    if (orientation == 90 || orientation == 270) {
+      w = viewHeight;
+      h = viewWidth;
+    } else {
+      w = viewWidth;
+      h = viewHeight;
+    }
+    float scale;
+    if (w < h) {
+      scale = Math.max(w / viewWidth, h / viewHeight);
+    } else {
+      scale = Math.min(w / viewWidth, h / viewHeight);
+    }
+    w *= scale;
+    h *= scale;
+    if (Math.abs(w / h - targetWidth / targetHeight) < 0.1f) {
+      scale *= Math.max(targetWidth / w, targetHeight / h);
+    }
+    if (orientation == 270) {
+      orientation = -90;
+    }
+    animate().scaleX(scale).scaleY(scale).rotation(-orientation).setDuration(180).start();
+  }
+
   /**
    * Set if the video stream should be mirrored or not.
    */
@@ -312,8 +371,11 @@
   @Override
   protected void onMeasure(int widthSpec, int heightSpec) {
     ThreadUtils.checkIsOnMainThread();
-    Point size = videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
+    Point size = videoLayoutMeasure.measure(isCamera, widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
     setMeasuredDimension(size.x, size.y);
+    if (!isCamera) {
+      updateRotation();
+    }
     logD("onMeasure(). New size: " + size.x + "x" + size.y);
   }
 
@@ -337,7 +399,7 @@
       drawnFrameHeight = rotatedFrameHeight;
     } else {
       drawnFrameWidth = rotatedFrameWidth;
-      drawnFrameHeight = (int) (rotatedFrameWidth / layoutAspectRatio);
+      drawnFrameHeight = (int) (rotatedFrameHeight / layoutAspectRatio);
     }
     // Aspect ratio of the drawn frame and the view is the same.
     final int width = Math.min(getWidth(), drawnFrameWidth);
@@ -413,6 +475,9 @@
     if (rendererEvents != null) {
       rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
     }
+    if (isCamera) {
+      eglRenderer.setRotation(-OrientationHelper.cameraRotation);
+    }
     int rotatedWidth = rotation == 0 || rotation == 180 ? videoWidth : videoHeight;
     int rotatedHeight = rotation == 0 || rotation == 180 ? videoHeight : videoWidth;
     // run immediately if possible for ui thread tests

@@ -182,14 +182,14 @@ public class VideoFrameDrawer {
   public void drawFrame(
       VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) {
     drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */,
-        frame.getRotatedWidth(), frame.getRotatedHeight());
+        frame.getRotatedWidth(), frame.getRotatedHeight(), false);
   }
 
   public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
       @Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
-      int viewportHeight) {
-    final int width = frame.getRotatedWidth();
-    final int height = frame.getRotatedHeight();
+      int viewportHeight, boolean rotate) {
+    final int width = rotate ? frame.getRotatedHeight() : frame.getRotatedWidth();
+    final int height = rotate ? frame.getRotatedWidth() : frame.getRotatedHeight();
     calculateTransformedRenderSize(width, height, additionalRenderMatrix);
     if (renderWidth <= 0 || renderHeight <= 0) {
       Logging.w(TAG, "Illegal frame size: " + renderWidth + "x" + renderHeight);
